[ 559.551054] env[61663]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_linux_bridge.linux_bridge.LinuxBridgePlugin'>' with name 'linux_bridge' {{(pid=61663) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 559.551454] env[61663]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_noop.noop.NoOpPlugin'>' with name 'noop' {{(pid=61663) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 559.551535] env[61663]: DEBUG os_vif [-] Loaded VIF plugin class '<class 'vif_plug_ovs.ovs.OvsPlugin'>' with name 'ovs' {{(pid=61663) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 559.551801] env[61663]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 559.639919] env[61663]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61663) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 559.649918] env[61663]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61663) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 559.789143] env[61663]: INFO nova.virt.driver [None req-172bc889-0ad0-4062-8483-21060d74e91c None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 559.864808] env[61663]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 559.865036] env[61663]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 559.865115] env[61663]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61663) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 563.064701] env[61663]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-e3c53d99-042d-4f51-9bf9-5829c92edcb7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.081028] env[61663]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61663) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 563.081216] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-8f1647df-071b-4d31-858a-3e99df9d1b21 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.108722] env[61663]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 666e0.
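The records above show nova-compute serializing session creation behind the "oslo_vmware_api_lock" and then logging into vCenter through oslo.vmware's suds SOAP client. A minimal sketch of opening such a session with oslo.vmware, assuming the endpoint from this log and purely hypothetical credentials and retry settings:

    # Sketch only: credentials, retry count and poll interval below are
    # placeholders, not values taken from this log.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap',  # soap_url host seen above
        'administrator@vsphere.local',   # hypothetical username
        'secret',                        # hypothetical password
        10,                              # api_retry_count
        0.5,                             # task_poll_interval (seconds)
        port=443)

    # Each SOAP call surfaces in the log as an
    # "Invoking <ManagedObject>.<Method> with opID=..." DEBUG record.
    content = session.vim.service_content
    print(content.about.version)  # e.g. "7.0.3", as the driver logs below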
[ 563.108974] env[61663]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.244s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 563.109457] env[61663]: INFO nova.virt.vmwareapi.driver [None req-172bc889-0ad0-4062-8483-21060d74e91c None None] VMware vCenter version: 7.0.3
[ 563.112877] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5492451f-33a6-4e90-b636-4b35fe069bff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.130957] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092c0b2d-5066-490e-9dc4-5dec45cf5cac {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.137597] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8ca7b2-6c51-4783-83c1-33f2597f74f7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.144716] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c342f704-594e-4f02-aa6f-8313e4ee8207 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.159497] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f846275-2b63-44c6-a388-f53c364a7ca6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.165991] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e39884-2b0e-45ce-8d61-53a99587ae71 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.197176] env[61663]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-e1b5ab46-ee2e-4b94-8cdd-4f771264b302 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 563.203200] env[61663]: DEBUG nova.virt.vmwareapi.driver [None req-172bc889-0ad0-4062-8483-21060d74e91c None None] Extension org.openstack.compute already exists. {{(pid=61663) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 563.205903] env[61663]: INFO nova.compute.provider_config [None req-172bc889-0ad0-4062-8483-21060d74e91c None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
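The Acquiring / acquired :: waited / "released" :: held records that keep appearing in this log are emitted by oslo.concurrency's lock wrapper. An illustrative sketch of the pattern, using the same lock name as above but a hypothetical function, not nova's code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('oslo_vmware_api_lock')
    def create_session():
        # Runs under the named lock; lockutils' inner() wrapper logs the
        # "Acquiring", "acquired :: waited N s" and "released :: held N s"
        # DEBUG records seen throughout this log.
        ...

    # Equivalent context-manager form:
    with lockutils.lock('oslo_vmware_api_lock'):
        pass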
[ 563.226184] env[61663]: DEBUG nova.context [None req-172bc889-0ad0-4062-8483-21060d74e91c None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),46dd700d-eea0-463c-9a5e-53ee6ad56f04(cell1) {{(pid=61663) load_cells /opt/stack/nova/nova/context.py:464}}
[ 563.228259] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 563.228506] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 563.229180] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 563.229677] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Acquiring lock "46dd700d-eea0-463c-9a5e-53ee6ad56f04" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 563.229872] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Lock "46dd700d-eea0-463c-9a5e-53ee6ad56f04" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 563.230924] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Lock "46dd700d-eea0-463c-9a5e-53ee6ad56f04" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 563.251648] env[61663]: INFO dbcounter [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Registered counter for database nova_cell0
[ 563.260547] env[61663]: INFO dbcounter [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Registered counter for database nova_cell1
[ 563.263714] env[61663]: DEBUG oslo_db.sqlalchemy.engines [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61663) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 563.264335] env[61663]: DEBUG oslo_db.sqlalchemy.engines [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61663) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 563.268423] env[61663]: DEBUG dbcounter [-] [61663] Writer thread running {{(pid=61663) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 563.270096] env[61663]: DEBUG dbcounter [-] [61663] Writer thread running {{(pid=61663) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 563.271851] env[61663]: ERROR nova.db.main.api [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 563.271851] env[61663]: result = function(*args, **kwargs)
[ 563.271851] env[61663]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 563.271851] env[61663]: return func(*args, **kwargs)
[ 563.271851] env[61663]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 563.271851] env[61663]: result = fn(*args, **kwargs)
[ 563.271851] env[61663]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 563.271851] env[61663]: return f(*args, **kwargs)
[ 563.271851] env[61663]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 563.271851] env[61663]: return db.service_get_minimum_version(context, binaries)
[ 563.271851] env[61663]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 563.271851] env[61663]: _check_db_access()
[ 563.271851] env[61663]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 563.271851] env[61663]: stacktrace = ''.join(traceback.format_stack())
[ 563.271851] env[61663]:
[ 563.273057] env[61663]: ERROR nova.db.main.api [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 563.273057] env[61663]: result = function(*args, **kwargs)
[ 563.273057] env[61663]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 563.273057] env[61663]: return func(*args, **kwargs)
[ 563.273057] env[61663]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 563.273057] env[61663]: result = fn(*args, **kwargs)
[ 563.273057] env[61663]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 563.273057] env[61663]: return f(*args, **kwargs)
[ 563.273057] env[61663]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 563.273057] env[61663]: return db.service_get_minimum_version(context, binaries)
[ 563.273057] env[61663]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 563.273057] env[61663]: _check_db_access()
[ 563.273057] env[61663]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 563.273057] env[61663]: stacktrace = ''.join(traceback.format_stack())
[ 563.273057] env[61663]:
[ 563.273469] env[61663]: WARNING nova.objects.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 563.273580] env[61663]: WARNING nova.objects.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Failed to get minimum service version for cell 46dd700d-eea0-463c-9a5e-53ee6ad56f04
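The two ERROR tracebacks above come from nova's guard against direct database access inside nova-compute: the service-version lookup is blocked, its stack is logged, and both cells are then reported with WARNINGs. A simplified sketch of that guard pattern, assumed to be modeled on the nova/db/main/api.py frames in the traceback; the names here (DISALLOW_DB_ACCESS, require_db_allowed) are illustrative, not nova's exact code:

    import functools
    import logging
    import traceback

    LOG = logging.getLogger(__name__)
    DISALLOW_DB_ACCESS = True  # set when the process runs as nova-compute


    class DBNotAllowed(Exception):
        pass


    def _check_db_access():
        # Log the full stack so the offending caller can be identified
        # (this is what produces the "No DB access allowed" record),
        # then refuse the query.
        if DISALLOW_DB_ACCESS:
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in nova-compute: %s', stacktrace)
            raise DBNotAllowed('nova-compute')


    def require_db_allowed(f):
        # Decorator applied to DB API functions, as in the wrapper
        # frames of the tracebacks above.
        @functools.wraps(f)
        def wrapper(context, *args, **kwargs):
            _check_db_access()
            return f(context, *args, **kwargs)
        return wrapper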
[ 563.274055] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Acquiring lock "singleton_lock" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 563.274221] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Acquired lock "singleton_lock" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 563.274476] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Releasing lock "singleton_lock" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 563.274809] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Full set of CONF: {{(pid=61663) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 563.274951] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ******************************************************************************** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 563.275093] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] Configuration options gathered from: {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 563.275231] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 563.275419] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 563.275546] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ================================================================================ {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 563.275761] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] allow_resize_to_same_host = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.275931] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] arq_binding_timeout = 300 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.276074] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] backdoor_port = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.276207] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] backdoor_socket = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.276369] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] block_device_allocate_retries = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.276559] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] block_device_allocate_retries_interval = 3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.276747] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cert = self.pem {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.276916] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.277095] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute_monitors = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.277265] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] config_dir = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.277434] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] config_drive_format = iso9660 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.277570] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.277735] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] config_source = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.277901] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] console_host = devstack {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.278083] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] control_exchange = nova {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.278245] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cpu_allocation_ratio = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.278405] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] daemon = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.278602] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] debug = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.278771] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] default_access_ip_network_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.278942] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] default_availability_zone = nova {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.279115] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] default_ephemeral_format = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.279276] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] default_green_pool_size = 1000 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.279513] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.279680] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] default_schedule_zone = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.279839] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] disk_allocation_ratio = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.280010] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] enable_new_services = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.280255] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] enabled_apis = ['osapi_compute'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.280366] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] enabled_ssl_apis = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.280530] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] flat_injected = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.280693] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] force_config_drive = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.280852] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] force_raw_images = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.281033] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] graceful_shutdown_timeout = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.281203] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] heal_instance_info_cache_interval = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.281431] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] host = cpu-1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.281610] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.281785] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.281952] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.282189] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.282358] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] instance_build_timeout = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.282523] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] instance_delete_interval = 300 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.282701] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] instance_format = [instance: %(uuid)s] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.282868] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] instance_name_template = instance-%08x {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.283045] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] instance_usage_audit = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.283225] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] instance_usage_audit_period = month {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.283392] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.283561] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.283733] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] internal_service_availability_zone = internal {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.283892] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] key = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.284061] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] live_migration_retry_count = 30 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.284233] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] log_config_append = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.284400] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.284572] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] log_dir = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.284718] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] log_file = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.284848] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] log_options = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.285015] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] log_rotate_interval = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.285192] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] log_rotate_interval_type = days {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.285358] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] log_rotation_type = none {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.285491] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.285620] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.285788] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.285955] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.286094] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.286260] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] long_rpc_timeout = 1800 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.286422] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] max_concurrent_builds = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.286610] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] max_concurrent_live_migrations = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.286781] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] max_concurrent_snapshots = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.286941] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] max_local_block_devices = 3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.287115] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] max_logfile_count = 30 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.287276] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] max_logfile_size_mb = 200 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.287434] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] maximum_instance_delete_attempts = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.287602] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] metadata_listen = 0.0.0.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.287771] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] metadata_listen_port = 8775 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.287939] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] metadata_workers = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.288114] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] migrate_max_retries = -1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.288283] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] mkisofs_cmd = genisoimage {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.288531] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.288655] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] my_ip = 10.180.1.21 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.288823] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] network_allocate_retries = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.289015] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.289192] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.289358] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] osapi_compute_listen_port = 8774 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.289543] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] osapi_compute_unique_server_name_scope = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.289715] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] osapi_compute_workers = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.289879] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] password_length = 12 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.290053] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] periodic_enable = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.290219] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] periodic_fuzzy_delay = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.290440] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] pointer_model = usbtablet {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.290611] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] preallocate_images = none {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.290899] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] publish_errors = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.291083] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] pybasedir = /opt/stack/nova {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.291268] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ram_allocation_ratio = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.291434] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] rate_limit_burst = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.291606] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] rate_limit_except_level = CRITICAL {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.291770] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] rate_limit_interval = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.291929] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] reboot_timeout = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.292105] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] reclaim_instance_interval = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.292266] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] record = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.292436] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] reimage_timeout_per_gb = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.292601] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] report_interval = 120 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.292804] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] rescue_timeout = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.292960] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] reserved_host_cpus = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.293152] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] reserved_host_disk_mb = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.293430] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] reserved_host_memory_mb = 512 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.293620] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] reserved_huge_pages = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.293800] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] resize_confirm_window = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.293960] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] resize_fs_using_block_device = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.294139] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] resume_guests_state_on_host_boot = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.294313] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.294481] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] rpc_response_timeout = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.294706] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] run_external_periodic_tasks = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.294811] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] running_deleted_instance_action = reap {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.294972] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.295144] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] running_deleted_instance_timeout = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.295304] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler_instance_sync_interval = 120 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.295498] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_down_time = 720 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.295747] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] servicegroup_driver = db {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.296031] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] shelved_offload_time = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.296235] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] shelved_poll_interval = 3600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.296418] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] shutdown_timeout = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.296617] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] source_is_ipv6 = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.296789] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ssl_only = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.297071] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.297246] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] sync_power_state_interval = 600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.297413] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] sync_power_state_pool_size = 1000 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.297581] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] syslog_log_facility = LOG_USER {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.297749] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] tempdir = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.297904] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] timeout_nbd = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.298083] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] transport_url = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.298247] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] update_resources_interval = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.298408] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] use_cow_images = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.298607] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] use_eventlog = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.298777] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] use_journal = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.298939] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] use_json = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.299114] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] use_rootwrap_daemon = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.299278] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] use_stderr = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.299440] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] use_syslog = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.299601] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vcpu_pin_set = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.299774] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plugging_is_fatal = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.299945] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plugging_timeout = 300 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.300126] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] virt_mkfs = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.300292] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] volume_usage_poll_interval = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.300455] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] watch_log_file = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.300620] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] web = /usr/share/spice-html5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 563.300808] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_concurrency.disable_process_locking = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.301135] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.301319] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.301489] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.301675] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.301829] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.301995] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.302218] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.auth_strategy = keystone {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.302390] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.compute_link_prefix = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.302570] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.302748] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.dhcp_domain = novalocal {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.302921] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.enable_instance_password = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.303098] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.glance_link_prefix = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.303268] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.303440] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.303603] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.instance_list_per_project_cells = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.303772] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.list_records_by_skipping_down_cells = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.303941] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.local_metadata_per_cell = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.304120] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.max_limit = 1000 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.304297] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.metadata_cache_expiration = 15 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.304478] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.neutron_default_tenant_id = default {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.304650] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.use_neutron_default_nets = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.304871] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.304983] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.305165] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.305342] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.305516] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.vendordata_dynamic_targets = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.305682] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.vendordata_jsonfile_path = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.305862] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.306074] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.backend = dogpile.cache.memcached {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.306250] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.backend_argument = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.306447] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.config_prefix = cache.oslo {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.306657] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.dead_timeout = 60.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.306834] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.debug_cache_backend = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.307011] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.enable_retry_client = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.307189] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.enable_socket_keepalive = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.307366] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.enabled = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.307538] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.enforce_fips_mode = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.307708] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.expiration_time = 600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.307873] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.hashclient_retry_attempts = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.308057] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.308227] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_dead_retry = 300 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.308393] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_password = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.308593] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.308778] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.308947] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_pool_maxsize = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.309137] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.309308] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_sasl_enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.309502] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.309680] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.309846] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.memcache_username = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.310025] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.proxies = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.310200] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.redis_password = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.310372] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.310554] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.310766] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.redis_server = localhost:6379 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.310897] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.redis_socket_timeout = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.311070] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.redis_username = None {{(pid=61663) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.311240] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.retry_attempts = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.311410] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.retry_delay = 0.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.311576] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.socket_keepalive_count = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.311747] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.socket_keepalive_idle = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.311909] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.socket_keepalive_interval = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.312081] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.tls_allowed_ciphers = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.312254] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.tls_cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.312404] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.tls_certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.312571] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.tls_enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.312733] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cache.tls_keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.312910] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.auth_section = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.313101] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.auth_type = password {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.313272] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.313454] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.313623] env[61663]: DEBUG oslo_service.service 
[None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.313795] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.313962] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.cross_az_attach = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.314143] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.debug = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.314309] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.endpoint_template = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.314478] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.http_retries = 3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.314648] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.314840] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.315045] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.os_region_name = RegionOne {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.315170] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.315335] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cinder.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.315511] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.315676] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.cpu_dedicated_set = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.315836] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.cpu_shared_set = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.316023] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.image_type_exclude_list = [] {{(pid=61663) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.316187] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.316355] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.316547] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.316737] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.316914] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.317095] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.resource_provider_association_refresh = 300 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.317265] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.shutdown_retry_interval = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.317450] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.317633] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] conductor.workers = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.317816] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] console.allowed_origins = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.317981] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] console.ssl_ciphers = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.318169] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] console.ssl_minimum_version = default {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.318345] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] consoleauth.enforce_session_timeout = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.318550] env[61663]: DEBUG oslo_service.service [None 
req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] consoleauth.token_ttl = 600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.318736] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.318912] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.319076] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.319244] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.connect_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.319406] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.connect_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.319569] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.endpoint_override = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.319737] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.319897] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.320071] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.max_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.320235] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.min_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.320395] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.region_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.320615] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.retriable_status_codes = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.320878] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.service_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.321091] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.service_type = accelerator {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.321271] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.321453] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.status_code_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.321617] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.status_code_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.321784] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.321970] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.322148] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] cyborg.version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.322334] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.backend = sqlalchemy {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.322513] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.connection = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.322685] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.connection_debug = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.322866] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.connection_parameters = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.323044] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.connection_recycle_time = 3600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.323217] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.connection_trace = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.323381] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.db_inc_retry_interval = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.323591] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.db_max_retries = 20 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 563.323782] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.db_max_retry_interval = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.323955] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.db_retry_interval = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.324136] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.max_overflow = 50 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.324303] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.max_pool_size = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.324468] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.max_retries = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.324642] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.324804] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.mysql_wsrep_sync_wait = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.324962] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.pool_timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.325139] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.retry_interval = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.325299] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.slave_connection = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.325461] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.sqlite_synchronous = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.325624] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] database.use_db_reconnect = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.325807] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.backend = sqlalchemy {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.325979] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.connection = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.326161] env[61663]: DEBUG oslo_service.service [None 
req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.connection_debug = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.326335] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.connection_parameters = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.326524] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.connection_recycle_time = 3600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.326709] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.connection_trace = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.326879] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.db_inc_retry_interval = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.327055] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.db_max_retries = 20 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.327227] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.db_max_retry_interval = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.327392] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.db_retry_interval = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.327558] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.max_overflow = 50 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.327723] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.max_pool_size = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.327889] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.max_retries = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.328071] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.328236] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.328397] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.pool_timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.328588] env[61663]: DEBUG oslo_service.service [None 
req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.retry_interval = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.328763] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.slave_connection = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.328931] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] api_database.sqlite_synchronous = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.329126] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] devices.enabled_mdev_types = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.329311] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.329504] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.329693] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ephemeral_storage_encryption.enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.329864] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.330051] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.api_servers = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.330224] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.330392] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.330562] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.330728] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.connect_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.330892] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.connect_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.331072] env[61663]: DEBUG oslo_service.service [None 
req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.debug = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.331288] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.default_trusted_certificate_ids = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.331412] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.enable_certificate_validation = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.331578] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.enable_rbd_download = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.331741] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.endpoint_override = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.331910] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.332083] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.332249] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.max_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.332411] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.min_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.332623] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.num_retries = 3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.332748] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.rbd_ceph_conf = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.332913] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.rbd_connect_timeout = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.333094] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.rbd_pool = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.333267] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.rbd_user = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.333430] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.region_name = None {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.333592] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.retriable_status_codes = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.333751] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.service_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.333922] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.service_type = image {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.334103] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.334270] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.status_code_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.334442] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.status_code_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.334602] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.334790] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.334959] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.verify_glance_signatures = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.335137] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] glance.version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.335313] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] guestfs.debug = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.335524] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] mks.enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.335943] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.336156] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] image_cache.manager_interval = 2400 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
563.336335] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] image_cache.precache_concurrency = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.336526] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] image_cache.remove_unused_base_images = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.336703] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.336874] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.337066] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] image_cache.subdirectory_name = _base {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.337253] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.api_max_retries = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.337421] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.api_retry_interval = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.337589] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.auth_section = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.337756] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.auth_type = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.337920] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.338101] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.338277] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.338442] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.conductor_group = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.338639] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.connect_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.338807] env[61663]: DEBUG 
oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.connect_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.338968] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.endpoint_override = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.339152] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.339313] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.339477] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.max_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.339642] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.min_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.339813] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.peer_list = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.339972] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.region_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.340146] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.retriable_status_codes = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.340314] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.serial_console_state_timeout = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.340475] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.service_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.340650] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.service_type = baremetal {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.340812] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.shard = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.340976] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.341145] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.status_code_retries = None {{(pid=61663) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.341304] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.status_code_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.341463] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.341644] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.341812] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ironic.version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.341999] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.342203] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] key_manager.fixed_key = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.342378] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.342541] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.barbican_api_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.342767] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.barbican_endpoint = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.342861] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.barbican_endpoint_type = public {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.343030] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.barbican_region_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.343198] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.343358] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.343525] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.collect_timing = False {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.343691] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.343848] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.344025] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.number_of_retries = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.344191] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.retry_delay = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.344358] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.send_service_user_token = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.344525] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.344687] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.344852] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.verify_ssl = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.345016] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican.verify_ssl_path = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.345190] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican_service_user.auth_section = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.345407] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican_service_user.auth_type = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.345518] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican_service_user.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.345677] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican_service_user.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.345842] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican_service_user.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.346013] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican_service_user.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.346182] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican_service_user.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.346346] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican_service_user.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.346560] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] barbican_service_user.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.346750] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.approle_role_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.346916] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.approle_secret_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.347094] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.347260] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.347429] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.347596] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.347758] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.347940] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.kv_mountpoint = secret {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.348114] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.kv_path = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.348283] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.kv_version = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.348448] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.namespace = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.348645] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.root_token_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.348817] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.348979] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.ssl_ca_crt_file = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.349157] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.349329] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.use_ssl = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.349510] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.349693] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.auth_section = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.349861] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.auth_type = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.350044] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.350213] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.350381] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.350544] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.connect_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.350710] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.connect_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.350874] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.endpoint_override = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.351053] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.351219] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.351380] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.max_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.351610] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.min_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.351695] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.region_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.351851] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.retriable_status_codes = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.352016] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.service_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.352194] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.service_type = identity {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.352358] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.352517] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.status_code_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.352682] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.status_code_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.352907] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.353026] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.353191] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] keystone.version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.353399] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.connection_uri = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.353564] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.cpu_mode = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.353927] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.353927] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.cpu_models = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.354096] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.cpu_power_governor_high = performance {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.354244] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.354410] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.cpu_power_management = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.354586] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.354754] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.device_detach_attempts = 8 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.354918] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.device_detach_timeout = 20 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356339] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.disk_cachemodes = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356339] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.disk_prefix = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356339] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.enabled_perf_events = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356339] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.file_backed_memory = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356339] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.gid_maps = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356339] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.hw_disk_discard = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356598] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.hw_machine_type = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356598] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.images_rbd_ceph_conf = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356598] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356598] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356878] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.images_rbd_glance_store_name = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.356948] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.images_rbd_pool = rbd {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.357111] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.images_type = default {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.357274] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.images_volume_group = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.357526] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.inject_key = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.357606] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.inject_partition = -2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.357755] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.inject_password = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.357918] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.iscsi_iface = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.358093] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.iser_use_multipath = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.358264] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.358427] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.358634] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_downtime = 500 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.358805] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.358972] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.359148] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_inbound_addr = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.359313] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.359476] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.359640] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_scheme = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.359823] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_timeout_action = abort {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.359992] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_tunnelled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.360171] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_uri = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.360338] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.live_migration_with_native_tls = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.360500] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.max_queues = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.360667] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.360916] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.361088] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.nfs_mount_options = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.361399] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.361578] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.361752] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.361912] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.362087] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.362254] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.num_pcie_ports = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.362424] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.362656] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.pmem_namespaces = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.362756] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.quobyte_client_cfg = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.363049] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.363226] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.363394] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.363563] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.363728] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rbd_secret_uuid = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.363891] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rbd_user = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.364064] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.364242] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.364406] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rescue_image_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.364568] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rescue_kernel_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.364732] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rescue_ramdisk_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.364907] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.365083] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.rx_queue_size = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.365263] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.smbfs_mount_options = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.365544] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.365726] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.snapshot_compression = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.365892] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.snapshot_image_format = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.366128] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.366302] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.sparse_logical_volumes = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.366482] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.swtpm_enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.366681] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.swtpm_group = tss {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.366854] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.swtpm_user = tss {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.367050] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.sysinfo_serial = unique {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.367325] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.tb_cache_size = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.367532] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.tx_queue_size = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.367713] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.uid_maps = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.367888] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.use_virtio_for_bridges = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.368076] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.virt_type = kvm {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.368254] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.volume_clear = zero {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.368425] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.volume_clear_size = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.368627] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.volume_use_multipath = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.368800] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.vzstorage_cache_path = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.368974] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.369162] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.369336] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.369511] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.369814] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.369993] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.vzstorage_mount_user = stack {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.370179] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.370360] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.auth_section = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.370538] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.auth_type = password {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.370729] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.371025] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.371288] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.371479] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.connect_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.371650] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.connect_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.371829] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.default_floating_pool = public {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.371992] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.endpoint_override = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.372176] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.extension_sync_interval = 600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.372341] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.http_retries = 3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.372507] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.372672] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.372834] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.max_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.373015] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.373188] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.min_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.373360] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.ovs_bridge = br-int {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.373528] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.physnets = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.373700] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.region_name = RegionOne {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.373860] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.retriable_status_codes = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.374040] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.service_metadata_proxy = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.374206] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.service_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.374380] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.service_type = network {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.374633] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.374866] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.status_code_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.375060] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.status_code_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.375231] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.375420] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.375588] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] neutron.version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.375768] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] notifications.bdms_in_notifications = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.375950] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] notifications.default_level = INFO {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.376148] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] notifications.notification_format = unversioned {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.376318] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] notifications.notify_on_state_change = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.376504] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.376685] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] pci.alias = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.376918] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] pci.device_spec = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.377289] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] pci.report_in_placement = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.377498] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.auth_section = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.377687] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.auth_type = password {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.377868] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.378045] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.378217] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.378387] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.378580] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.connect_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.378755] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.connect_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.378926] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.default_domain_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.379132] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.default_domain_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.379429] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.domain_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.379692] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.domain_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.379917] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.endpoint_override = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.380130] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.380310] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.380475] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.max_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.380644] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.min_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.380893] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.password = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.381136] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.project_domain_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.381337] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.project_domain_name = Default {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.381522] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.project_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.381748] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.project_name = service {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.381959] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.region_name = RegionOne {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.382145] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.retriable_status_codes = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.382310] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.service_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.382544] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.service_type = placement {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.382743] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.382979] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.status_code_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.383122] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.status_code_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.383303] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.system_scope = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.383458] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.383620] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.trust_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.383782] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.user_domain_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.383992] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.user_domain_name = Default {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.384258] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.user_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.384468] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.username = placement {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.384669] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.384838] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] placement.version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.385037] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.cores = 20 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.385216] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.count_usage_from_placement = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.385394] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.385576] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.injected_file_content_bytes = 10240 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.385791] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.injected_file_path_length = 255 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.386116] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.injected_files = 5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.386347] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.instances = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.386537] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.key_pairs = 100 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.386715] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.metadata_items = 128 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.386887] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.ram = 51200 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.387070] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.recheck_quota = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.387246] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.server_group_members = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.387415] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] quota.server_groups = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.387591] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.387761] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.388038] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.image_metadata_prefilter = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.388302] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.388530] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.max_attempts = 3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.388750] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.max_placement_results = 1000 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.388932] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.389117] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.389290] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.389474] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] scheduler.workers = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.389663] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.389839] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.390088] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.390344] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.390540] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.390715] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.390891] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.391104] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.391284] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.host_subset_size = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.391454] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.391622] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.391886] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.392184] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.isolated_hosts = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.392389] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.isolated_images = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.392567] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.392743] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.392921] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.393136] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.pci_in_placement = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.393274] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.393558] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.393796] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.394043] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.394246] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.394422] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.394591] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.track_instance_changes = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.394780] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.394957] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] metrics.required = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.395140] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] metrics.weight_multiplier = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.395310] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.395478] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] metrics.weight_setting = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.395809] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.395986] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] serial_console.enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.396182] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] serial_console.port_range = 10000:20000 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.396380] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.396573] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.396744] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] serial_console.serialproxy_port = 6083 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.396914] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.auth_section = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.397161] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.auth_type = password {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.397378] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.397529] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.397700] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 563.397865] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.insecure = False {{(pid=61663)
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.398035] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.398216] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.send_service_user_token = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.398380] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.398565] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] service_user.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.398751] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.agent_enabled = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.398933] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.399280] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.399484] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.399660] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.html5proxy_port = 6082 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.399823] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.image_compression = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.399985] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.jpeg_compression = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.400166] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.playback_compression = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.400338] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.server_listen = 127.0.0.1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.400508] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.400722] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.streaming_mode = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.400932] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] spice.zlib_compression = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.401126] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] upgrade_levels.baseapi = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.401310] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] upgrade_levels.compute = auto {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.401492] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] upgrade_levels.conductor = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.401717] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] upgrade_levels.scheduler = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.401912] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.402155] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.402330] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vendordata_dynamic_auth.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.402495] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vendordata_dynamic_auth.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.402664] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.402828] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vendordata_dynamic_auth.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.402991] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.403170] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.403329] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vendordata_dynamic_auth.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.403570] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.api_retry_count = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.403658] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.ca_file = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.403833] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.404010] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.cluster_name = testcl1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.404181] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.connection_pool_size = 10 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.404344] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.console_delay_seconds = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.404515] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.datastore_regex = ^datastore.* {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.404742] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.404960] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.host_password = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.405153] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.host_port = 443 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.405332] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.host_username = administrator@vsphere.local {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.405503] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.insecure = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.405666] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.integration_bridge = None {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.405830] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.maximum_objects = 100 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.405989] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.pbm_default_policy = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.406169] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.pbm_enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.406330] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.pbm_wsdl_location = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.406522] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.406788] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.serial_port_proxy_uri = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.407066] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.serial_port_service_uri = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.407272] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.task_poll_interval = 0.5 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.407518] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.use_linked_clone = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.407630] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.vnc_keymap = en-us {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.407804] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.vnc_port = 5900 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.407976] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vmware.vnc_port_total = 10000 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.408185] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.auth_schemes = ['none'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.408371] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.408725] env[61663]: 
DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.408921] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.409109] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.novncproxy_port = 6080 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.409299] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.server_listen = 127.0.0.1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.409475] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.409640] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.vencrypt_ca_certs = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.409799] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.vencrypt_client_cert = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.409958] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vnc.vencrypt_client_key = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.410159] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.410349] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.disable_deep_image_inspection = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.410556] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.410789] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.410969] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.411152] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.disable_rootwrap = False {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.411319] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.enable_numa_live_migration = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.411485] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.411649] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.411814] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.411978] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.libvirt_disable_apic = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.412152] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.412317] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.412480] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.412644] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.412809] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.412966] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.413141] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.413307] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
563.413587] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.413654] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.413846] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.414029] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.client_socket_timeout = 900 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.414267] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.default_pool_size = 1000 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.414475] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.keep_alive = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.414653] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.max_header_line = 16384 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.414821] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.414995] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.ssl_ca_file = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.415234] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.ssl_cert_file = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.415411] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.ssl_key_file = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.415644] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.tcp_keepidle = 600 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.415850] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.416031] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] zvm.ca_file = None {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.416198] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] zvm.cloud_connector_url = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.416535] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.416723] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] zvm.reachable_timeout = 300 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.416912] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.enforce_new_defaults = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.417100] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.enforce_scope = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.417280] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.policy_default_rule = default {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.417464] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.417658] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.policy_file = policy.yaml {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.417813] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.417979] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.418156] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.418317] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.418503] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.418687] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.418864] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.419055] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.connection_string = messaging:// {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.419230] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.enabled = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.419402] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.es_doc_type = notification {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.419572] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.es_scroll_size = 10000 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.419739] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.es_scroll_time = 2m {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.419906] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.filter_error_trace = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.420088] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.hmac_keys = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.420273] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.sentinel_service_name = mymaster {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.420573] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.socket_timeout = 0.1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.420804] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.trace_requests = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.421146] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler.trace_sqlalchemy = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.421298] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler_jaeger.process_tags = {} {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.421373] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.421511] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] profiler_otlp.service_name_prefix = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.421683] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] remote_debug.host = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.421844] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] remote_debug.port = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.422038] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.422210] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.422375] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.422538] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.422704] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.422866] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.423041] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.423198] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.423359] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.423536] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.423703] env[61663]: 
DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.423980] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.424049] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.424225] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.424397] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.424566] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.424733] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.424908] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.425083] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.425260] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.425427] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.425593] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.425761] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.425927] env[61663]: DEBUG oslo_service.service [None 
req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.426101] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.426267] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.426431] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.426634] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.426809] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.426976] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.ssl = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.427166] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.427339] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.427510] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.427683] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.427852] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.428018] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.428218] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.428386] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_notifications.retry = -1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.428608] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.428791] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.428968] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.auth_section = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.429154] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.auth_type = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.429319] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.cafile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.429493] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.certfile = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.429652] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.collect_timing = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.429812] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.connect_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.429972] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.connect_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.430145] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.endpoint_id = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.430306] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.endpoint_override = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.430469] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.insecure = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.430629] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.keyfile = None {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.430786] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.max_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.430941] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.min_version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.431107] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.region_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.431269] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.retriable_status_codes = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.431426] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.service_name = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.431582] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.service_type = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.431747] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.split_loggers = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.431905] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.status_code_retries = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.432350] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.status_code_retry_delay = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.432350] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.timeout = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.432440] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.valid_interfaces = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.432564] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_limit.version = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.432698] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_reports.file_event_handler = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.432865] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61663) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.433033] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] oslo_reports.log_dir = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.433212] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.433374] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.433535] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.433704] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.433868] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.434102] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.434265] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.434368] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_ovs_privileged.group = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.434524] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.434689] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.434853] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.435015] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] vif_plug_ovs_privileged.user = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.435196] env[61663]: DEBUG oslo_service.service 
[None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.435383] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.435559] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.435736] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.435910] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.436089] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.436261] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.436427] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.436634] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.436822] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_ovs.isolate_vif = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.437010] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.437191] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.437376] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.437548] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
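
[editor's note] The records surrounding this point are oslo.service's standard startup dump: when the service launches, it walks every option registered with oslo.config (here the vif_plug_*_privileged, os_vif_*, os_brick, privsep_osbrick, and nova_sys_admin groups) and logs each effective value at DEBUG via log_opt_values, closing the dump with the row of asterisks visible a little further down. A minimal sketch of that mechanism using the public oslo.config API; the option and group names below are illustrative, not Nova's:

    import logging
    from oslo_config import cfg

    CONF = cfg.CONF
    # Register one example option in a named group, the way os-vif and
    # privsep register theirs.
    CONF.register_opts([cfg.IntOpt('thread_pool_size', default=8)],
                       group='demo_privileged')

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger(__name__)

    CONF([])  # parse an empty command line; a real service passes sys.argv
    # Emits one "demo_privileged.thread_pool_size = 8"-style line per
    # registered option, which is exactly the shape of the records above.
    CONF.log_opt_values(LOG, logging.DEBUG)
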
[ 563.437722] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_vif_ovs.per_port_bridge = False {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.437913] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_brick.lock_path = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.438071] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.438241] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.438415] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] privsep_osbrick.capabilities = [21] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.438606] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] privsep_osbrick.group = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.438775] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] privsep_osbrick.helper_command = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.438943] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.439121] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.439282] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] privsep_osbrick.user = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.439458] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.439620] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] nova_sys_admin.group = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.439778] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] nova_sys_admin.helper_command = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.439940] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
563.440113] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.440272] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] nova_sys_admin.user = None {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 563.440400] env[61663]: DEBUG oslo_service.service [None req-1b9908f4-bc9d-4d5a-81f4-2688269d9030 None None] ******************************************************************************** {{(pid=61663) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 563.440861] env[61663]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 563.452367] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Getting list of instances from cluster (obj){ [ 563.452367] env[61663]: value = "domain-c8" [ 563.452367] env[61663]: _type = "ClusterComputeResource" [ 563.452367] env[61663]: } {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 563.454013] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b8206f-8e92-47df-b17e-43c8a5c21b06 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.464225] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Got total of 0 instances {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 563.464869] env[61663]: WARNING nova.virt.vmwareapi.driver [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 563.465382] env[61663]: INFO nova.virt.node [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Generated node identity b47d006d-a9bd-461e-a5d9-39811f005278 [ 563.465652] env[61663]: INFO nova.virt.node [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Wrote node identity b47d006d-a9bd-461e-a5d9-39811f005278 to /opt/stack/data/n-cpu-1/compute_id [ 563.479142] env[61663]: WARNING nova.compute.manager [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Compute nodes ['b47d006d-a9bd-461e-a5d9-39811f005278'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 563.520842] env[61663]: INFO nova.compute.manager [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 563.550831] env[61663]: WARNING nova.compute.manager [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
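
[editor's note] The ComputeHostNotFound warnings above are the expected first-boot path: the freshly generated node identity (b47d006d-...) has no compute-node row in the database yet, so the resource tracker creates one just below. The "Acquiring lock / acquired / released" records that follow come from oslo.concurrency's named-lock wrapper (the "inner" frames at lockutils.py:402/407/421), which serializes the resource tracker's critical sections on the "compute_resources" lock — the log shows the same name guarding both clean_compute_node_cache and _update_available_resource. A minimal sketch of that locking pattern, assuming only the public lockutils API; the function is illustrative, not Nova's actual method:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def clean_compute_node_cache():
        # All functions decorated with the same lock name in this process
        # run mutually exclusively (in-process semaphore by default);
        # entry and exit produce the "Lock ... acquired/released" DEBUG
        # lines with the waited/held timings seen in this log.
        pass

    clean_compute_node_cache()
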
[ 563.551235] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.551573] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.551830] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 563.552102] env[61663]: DEBUG nova.compute.resource_tracker [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 563.553747] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4829ebfb-ab70-448d-98bf-aa7b1b97f521 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.565785] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c183fe-3004-40af-a811-8d72d3e3d152 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.581264] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a4f91e4-3a71-4edf-a255-ec20f05d6b5a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.588276] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d396c34-1a58-4eb1-9eff-a83ba5f89dbc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.620566] env[61663]: DEBUG nova.compute.resource_tracker [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181251MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 563.620742] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 563.621485] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 563.634299] env[61663]: WARNING 
nova.compute.resource_tracker [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] No compute node record for cpu-1:b47d006d-a9bd-461e-a5d9-39811f005278: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host b47d006d-a9bd-461e-a5d9-39811f005278 could not be found. [ 563.651510] env[61663]: INFO nova.compute.resource_tracker [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: b47d006d-a9bd-461e-a5d9-39811f005278 [ 563.709270] env[61663]: DEBUG nova.compute.resource_tracker [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 563.709467] env[61663]: DEBUG nova.compute.resource_tracker [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 563.824629] env[61663]: INFO nova.scheduler.client.report [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] [req-f353d0cd-0bfd-4892-bff2-c4f5f1f2801c] Created resource provider record via placement API for resource provider with UUID b47d006d-a9bd-461e-a5d9-39811f005278 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 563.848366] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143264df-20c0-431e-a181-8d9a4d9ba0f6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.856820] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96a53f7-f927-488d-a99d-c72081c54085 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.887530] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3c62b6-d532-41fa-b779-e951d8d85740 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.895709] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94dede1-2873-4f25-b950-6b140544a22b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.909763] env[61663]: DEBUG nova.compute.provider_tree [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 563.947557] env[61663]: DEBUG nova.scheduler.client.report [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Updated inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 563.947811] env[61663]: DEBUG nova.compute.provider_tree [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Updating resource provider b47d006d-a9bd-461e-a5d9-39811f005278 generation from 0 to 1 during operation: update_inventory {{(pid=61663) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 563.947956] env[61663]: DEBUG nova.compute.provider_tree [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 564.001570] env[61663]: DEBUG nova.compute.provider_tree [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Updating resource provider b47d006d-a9bd-461e-a5d9-39811f005278 generation from 1 to 2 during operation: update_traits {{(pid=61663) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 564.020179] env[61663]: DEBUG nova.compute.resource_tracker [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 564.020385] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.399s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.020556] env[61663]: DEBUG nova.service [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Creating RPC server for service compute {{(pid=61663) start /opt/stack/nova/nova/service.py:182}} [ 564.036517] env[61663]: DEBUG nova.service [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] Join ServiceGroup membership for this service compute {{(pid=61663) start /opt/stack/nova/nova/service.py:199}} [ 564.036726] env[61663]: DEBUG nova.servicegroup.drivers.db [None req-7c746401-d862-4298-9436-dd12a7ef72cb None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61663) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 573.270978] env[61663]: DEBUG dbcounter [-] [61663] Writing DB stats nova_cell0:SELECT=1 {{(pid=61663) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 573.272768] env[61663]: DEBUG dbcounter [-] [61663] Writing DB stats nova_cell1:SELECT=1 {{(pid=61663) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 577.041977] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] 
Running periodic task ComputeManager._sync_power_states {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.052663] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Getting list of instances from cluster (obj){ [ 577.052663] env[61663]: value = "domain-c8" [ 577.052663] env[61663]: _type = "ClusterComputeResource" [ 577.052663] env[61663]: } {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 577.053746] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e84cae5-db22-4440-8cf2-921abed1892e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.062673] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Got total of 0 instances {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 577.062892] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 577.063206] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Getting list of instances from cluster (obj){ [ 577.063206] env[61663]: value = "domain-c8" [ 577.063206] env[61663]: _type = "ClusterComputeResource" [ 577.063206] env[61663]: } {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 577.064032] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d569b22a-62a9-478b-a94f-9d571091f64c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.071567] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Got total of 0 instances {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 619.700481] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.700845] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.701049] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 619.701132] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 619.711444] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 619.711621] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.711836] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.712035] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.712552] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.712771] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.712961] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.713138] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 619.713286] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 619.724930] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.725151] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.725317] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.725472] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 619.726642] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84d5867-1a75-4952-9641-bccae11694f8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.736310] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05af666-0e3f-4782-8d69-44bdceb43bc2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.750224] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80bdca8-03c4-430d-999e-33bc54e58043 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.756530] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401dc953-32d0-4cfa-b32d-9ecbbba2b9ba {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.785285] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181261MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 619.785428] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.785692] 
env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.816791] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 619.817045] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 619.833506] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb03e63e-59b0-4981-a5a8-006c6119c7ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.841014] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d1b107-6c94-409a-a713-31d1985b2bda {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.870601] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb3571b-611f-41a3-aa16-592d85703694 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.877183] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e579800-b1c1-48ba-a915-45559420c93d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.889614] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.897526] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 619.898775] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 619.898936] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.113s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.885762] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.898340] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.898340] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 679.898340] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 679.905673] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 679.905886] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.906070] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 679.906233] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.692308] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.692583] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.692779] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 680.692986] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 680.703529] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.703734] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.703899] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.704065] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 680.705175] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07244f43-fb9a-47f5-8008-195d35b51aab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.713615] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32ff8f8-3eb5-4812-94b5-661d22c39e80 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.726898] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45b6423-b9e0-4bdf-b8cd-6b8b75711dc6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.732991] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4520a7-ab57-48dc-bfad-18d4ea2f5958 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.761269] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181246MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 680.761413] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.761576] 
env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.790121] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 680.790288] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 680.804215] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e63d6dd-343f-43a7-a3f9-d6da4877804a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.811107] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e378f5-4416-448b-9afb-8d22b1e82756 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.839814] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edab1754-e0a2-465d-a67e-34dfdcf57aad {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.846639] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023f1655-fac4-47e8-97a8-a67fa86dabd0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.859484] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.867648] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 680.868844] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 680.869042] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.107s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.869696] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 681.870121] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.688429] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.692105] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.692271] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 740.692399] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 740.702173] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 740.702360] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 740.702523] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.691841] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.692213] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.692213] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.692384] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 741.703443] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.703650] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.703825] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.703980] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 741.705138] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7830855a-e929-440c-91dc-2dcfa1221b78 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.713701] env[61663]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19b0b13-b3ef-4778-ab7a-c5cfcd065cdf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.727020] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5927aef4-2c78-462c-ab55-aba4effcdbde {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.732931] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b8a93e-eb9c-4ddc-aca8-7b9df8b4405c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.761695] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181328MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 741.761856] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 741.762011] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.793210] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 741.793371] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 741.806244] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11c6e18-ea67-4a33-a936-b6296f1a6232 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.813320] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c309a4f-c1e6-4e64-b29a-0696ba4c436b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.842799] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63b0a5a-2e21-482d-ae42-c699509ebb52 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.849932] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5fcfdc-edcc-4366-9567-c1426eb95a91 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.862656] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.871053] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 741.872268] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 741.872444] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.110s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.873280] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 742.873571] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 800.688071] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.691752] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.692716] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.688541] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.700280] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.700280] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 802.700280] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 802.706135] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 802.706329] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.706491] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.691916] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.692212] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.702949] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.703437] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.703609] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.703881] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 803.705697] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07205eb6-9d21-4b9d-9151-f37321a9828c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.719167] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f2b8a5-8d6c-46fc-aaf2-3f65d3b40dfd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.742563] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d940976-4f51-4b1d-adb4-682747d19c7e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.753016] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab3536e-fdb7-42c9-876a-4a1aa773d237 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.804159] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181320MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 803.804433] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.804763] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.840414] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 803.840621] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 803.854757] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc96caf-4207-4d6b-9027-7a21b8824858 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.862493] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f25c13d-6298-4b02-9a07-e230c8f68181 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.893132] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d628757c-5ccf-41b5-a705-575170dfa201 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.900814] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ce820c-1b7c-49dc-a4cc-e5a8d076320e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.914278] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.923164] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 803.924408] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 803.924581] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.120s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.925162] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.925521] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 859.692694] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.693209] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 859.705952] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] There are 0 instances to clean {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 859.706172] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.706312] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances with incomplete migration {{(pid=61663) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 859.714859] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 861.718750] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
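Each "Inventory has not changed" entry repeats the same placement payload, so the report client skips the write to placement. Under the usual placement semantics, schedulable capacity per resource class is (total - reserved) * allocation_ratio. Below is a hypothetical sketch of that arithmetic and the change check, using the exact figures logged above; the helper names are mine, not placement's API.

INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # Placement treats (total - reserved) * allocation_ratio as the
    # schedulable capacity for each resource class.
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

def inventory_changed(cached, reported):
    # The "Inventory has not changed" lines imply a comparison like this:
    # an identical payload means no update call is made.
    return cached != reported

print(effective_capacity(INVENTORY))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

With this inventory the node overcommits CPU four-fold (48 physical vCPUs become 192 schedulable) while memory and disk stay at physical capacity, matching the allocation_ratio values of 4.0 and 1.0 in the payload.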
[ 861.719167] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.692229] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.692474] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.692730] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.693266] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 863.693266] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 863.701745] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 863.701949] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.692643] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 864.702293] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.702550] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.702659] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.702811] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 864.703911] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63cff74-1d74-4e32-bde2-e8c4d0f92227 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.713296] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb61f435-de37-413a-a755-cf63cc6a6b4c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.726748] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4182d214-ba4f-43a1-aaa1-8b4eb231493f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.732635] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ccff06-a3c3-413f-852f-f4240f321280 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.760538] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181324MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 864.760689] env[61663]: DEBUG 
oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.760857] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.823362] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 864.823546] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 864.840681] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing inventories for resource provider b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 864.854567] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating ProviderTree inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 864.854808] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 864.868048] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing aggregate associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, aggregates: None {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 864.885222] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing trait associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, traits: 
COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 864.898370] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9a7444-1bb0-4ab4-bd42-0469a5b9f02a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.905717] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422ab07d-573e-4046-b1f1-d99e865af0ec {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.936509] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa4505a-1a32-4e32-92c0-788085470539 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.943749] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9616fed2-5be6-4391-96c9-201ddc3f7dcb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.958929] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.970212] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 864.971649] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 864.971864] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.211s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.971877] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.692834] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.693046] env[61663]: DEBUG 
nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 921.689663] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.691307] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 923.692481] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 923.692868] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 923.692868] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 923.702096] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 923.702297] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 924.692598] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.691694] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.691992] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.701856] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.702202] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.702319] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.702514] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 925.703734] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fd8760-c47a-4833-91c5-f29e6eafc0f5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.712786] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038558a6-cde6-49ff-9af9-3bb5e386c5cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.727853] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c4fab8-dd44-4a0e-8071-589f8aa3335e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.734336] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dce69f5-f248-40a0-9ada-773eea87751d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.766720] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181322MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 925.766894] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.767132] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.797495] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 925.797695] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB 
total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 925.811188] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db207147-b7a4-486e-a73c-561faa73eb53 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.817811] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c1b7bb-4a33-4d6a-9cda-d4a7daffb861 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.846242] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd795c6-5cda-4657-a1f0-83c6f9de0858 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.852969] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf03b87-a825-4684-9ee3-7522d03bac67 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.866655] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.874266] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 925.875486] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 925.875652] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.109s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.872201] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.883421] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.883421] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 927.692281] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.690049] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 981.691590] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.692695] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 983.693268] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 983.693268] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 983.702494] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
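The heal pass recorded above rebuilds the candidate list and exits early because the host has no instances. Here is a hedged sketch of that control flow under a simplified instance model; the real loop is ComputeManager._heal_instance_info_cache, and the stub refresh function is hypothetical.

import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("nova.compute.manager")


def refresh_network_info_cache(instance):
    """Hypothetical stand-in for the per-instance network cache refresh."""


def heal_instance_info_cache(instances_on_host):
    LOG.debug("Starting heal instance info cache")
    LOG.debug("Rebuilding the list of instances to heal")
    to_heal = [inst for inst in instances_on_host if inst.get("active")]
    if not to_heal:
        LOG.debug("Didn't find any instances for network info cache update.")
        return
    # Refreshing one instance per periodic pass keeps the load on the
    # network service bounded while the cache still converges over time.
    refresh_network_info_cache(to_heal[0])


heal_instance_info_cache([])  # empty host, as in this log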
[ 984.692713] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.693014] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 985.703054] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.703297] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.703442] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 985.703591] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 985.704748] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5555cc-1d0f-4d09-b3e1-c694b7477e89 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.713697] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba46728-e424-4f20-81b6-fa024483dbe4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.727484] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d179dc5f-9efb-4f5b-a859-cc7d012c5104 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.734082] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25db5bf-712c-4303-941a-5ad07d7714c2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.763459] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181325MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 985.763618] env[61663]: DEBUG 
oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 985.763781] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 985.793319] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 985.793478] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 985.806298] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df87bc5-a7cc-44b1-b2c6-e10e81f11143 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.813856] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fabec38-a659-4960-a352-6703b16711e7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.842789] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45904da8-e8ba-4a29-896b-df6370bfc96a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.849643] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e462b2a5-7245-4bd5-b095-804324173c82 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.862643] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.870427] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 985.871644] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 985.871813] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.108s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.871552] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.871872] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.692649] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 988.693147] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 989.692747] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1041.693985] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1042.688769] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.691786] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1043.692205] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1043.692205] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1043.700970] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1045.692278] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.692962] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1046.703466] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.703694] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.703863] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1046.704029] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1046.705175] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57e2541-d307-4048-ac7d-712c97e4d1ce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.714059] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0301095-7871-479a-904e-0f3798dec159 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.727692] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a34667fb-cebc-412e-9a7b-f59fa4c6012f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.734241] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493fac24-4f08-49bc-b6cf-4a44244bcf22 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.762895] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181324MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1046.763012] env[61663]: DEBUG 
oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.763697] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.794881] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1046.795062] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1046.807957] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aaa6ae3-92f7-41bd-bc45-99723aba3589 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.814951] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4114a860-ec48-472c-b7ff-3815a9e2ed8b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.845634] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1ec16c-115d-4e3f-aa93-78f2731b8424 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.852722] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4247f9fa-588d-47ec-aab6-7a52d2f0c381 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.865277] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.873120] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1046.874321] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1046.874489] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.111s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1047.874240] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1047.874592] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.688452] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.698727] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.698727] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
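Throughout this section the "compute_resources" lock lines arrive in the same triple: Acquiring, acquired with the wait time, and released with the hold time (held 0.111s just above). The sketch below is a stdlib stand-in that emits the same trace; it assumes a per-name threading.Lock and is not oslo.concurrency's implementation.

import logging
import threading
import time
from contextlib import contextmanager

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger("oslo_concurrency.lockutils")
_LOCKS = {}


@contextmanager
def traced_lock(name, caller):
    """Log acquire / wait / hold timings around a named critical section."""
    lock = _LOCKS.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    start = time.monotonic()
    with lock:
        acquired = time.monotonic()
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, caller, acquired - start)
        try:
            yield
        finally:
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, caller, time.monotonic() - acquired)


with traced_lock("compute_resources",
                 "nova.compute.resource_tracker.ResourceTracker."
                 "_update_available_resource"):
    pass  # critical section: audit resources and report inventory

Serializing the audit and the inventory report behind one named lock is what keeps the "Hypervisor/Node resource view" and "Final resource view" entries consistent with each other within a single periodic pass.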
[ 1049.692126] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.687998] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1102.691633] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.692711] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1103.693175] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1103.693175] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1103.703264] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1106.693027] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.693394] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1106.703331] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.703553] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.703712] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1106.703862] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1106.705016] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba70beeb-57ea-4cc8-ab2f-c082cc14871c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.713741] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7abe631-59f1-4d5e-8312-4dcf31c88e81 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.727406] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb1f6ed-9ef3-44bf-b910-43c4d1baf523 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.733479] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a0e7e1-e2e4-464a-98cd-efdd59378e1e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.761306] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181326MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1106.761451] env[61663]: DEBUG 
oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.761641] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.792216] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1106.792384] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1106.804974] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153a0a76-23b3-4fd6-81b0-17d4a6387ecf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.812846] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e0052f-5b07-4a12-89e4-2d87369c4749 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.842392] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8051c3d1-e514-485a-b8d7-009ab9d2cb2a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.849487] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53eb6a2-8d35-467b-ba64-c9d0ce42dc00 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.862125] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.870283] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1106.871439] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1106.871637] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.110s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.870464] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.691544] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.691767] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.691913] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1111.693139] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.695084] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1162.695519] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1162.704515] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] There are 0 instances to clean {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1163.701950] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1163.702267] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1163.702308] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1163.711332] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c 
None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1163.711567] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1164.697941] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.692710] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1166.702580] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.702797] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.702972] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.703141] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1166.704244] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685adcf9-4967-436b-83a8-472d0c43adfe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.712819] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b908d9-6553-4858-a227-2a8a8eee65c9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.727301] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84357d9f-5405-4c50-ace1-340fe5216a18 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.733398] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46aa1f3c-bd26-4e46-94b4-97f1ee20e606 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.761895] env[61663]: DEBUG 
nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181322MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1166.762047] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.762210] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1166.859172] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1166.859356] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1166.875676] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing inventories for resource provider b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1166.887245] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating ProviderTree inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1166.887421] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1166.896734] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing aggregate associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, aggregates: 
None {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1166.911475] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing trait associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1166.922744] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd13595-ec44-410c-88a0-f6281c233305 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.929775] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab85f9e-06c4-4057-a73d-ddb2699030a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.958736] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc2920f-df16-47d1-9963-6d77d4378706 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.965664] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84621e2b-8211-476e-bae3-512d3447efda {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.979301] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.988197] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1166.989388] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1166.989563] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.227s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1167.989354] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.692321] 
env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.692742] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1168.692742] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1169.688665] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.699523] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.699523] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances with incomplete migration {{(pid=61663) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1170.701346] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.692602] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.692828] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.052018] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_power_states {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.060977] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Getting list of instances from cluster (obj){ [ 1177.060977] env[61663]: value = "domain-c8" [ 1177.060977] env[61663]: _type = "ClusterComputeResource" [ 1177.060977] env[61663]: } {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1177.062301] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31a8e77-6dc7-43a8-b341-21f7011c3be9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1177.070847] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Got total of 0 instances {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1224.711628] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.712106] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1224.712106] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1224.729151] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1224.729434] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.504614] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquiring lock "7f516750-b7ee-471b-a386-b898aac3985c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.504934] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Lock "7f516750-b7ee-471b-a386-b898aac3985c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.538948] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Starting instance...
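The build starting here is serialized per instance: the lock is named after the instance UUID (7f516750-b7ee-471b-a386-b898aac3985c), and the holder is a function nested inside build_and_run_instance, which is why its qualified name contains "<locals>". A condensed sketch of that shape (the body is an assumption, not Nova's actual code):

from oslo_concurrency import lockutils

def build_and_run_instance(instance_uuid):
    # The nested function gives the lock holder its
    # "...build_and_run_instance.<locals>..." qualified name.
    @lockutils.synchronized(instance_uuid)
    def _locked_do_build_and_run_instance():
        pass  # claim resources -> networks -> block devices -> spawn
    return _locked_do_build_and_run_instance()

build_and_run_instance('7f516750-b7ee-471b-a386-b898aac3985c')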
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1225.666767] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1225.667199] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1225.669314] env[61663]: INFO nova.compute.claims [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1225.708563] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.856372] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7947435a-c1d8-4893-b561-5dd82e2ed682 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.865361] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924e5796-9951-433a-a74a-10518274eefe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.905200] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a630a70f-bdba-470e-9d76-d7a395b99210 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.914451] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ab1a87-9791-4d4d-8232-c4437910942d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.937421] env[61663]: DEBUG nova.compute.provider_tree [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.953931] env[61663]: DEBUG nova.scheduler.client.report [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1225.978982] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.310s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1225.978982] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1226.030635] env[61663]: DEBUG nova.compute.utils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1226.032819] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Not allocating networking since 'none' was specified. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1226.045595] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1226.141098] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Start spawning the instance on the hypervisor. 
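The inventory dict repeatedly compared against placement above implies the node's schedulable capacity: placement treats capacity as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Working those numbers through (all values copied from the records above):

# Capacity implied by the provider inventory shown in this log.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 183},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable, <= {inv['max_unit']} per allocation")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400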
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1227.022739] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1227.023117] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1227.023187] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1227.023334] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1227.023476] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1227.023624] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1227.023833] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1227.023986] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1227.024288] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 
tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1227.024446] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1227.024618] env[61663]: DEBUG nova.virt.hardware [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1227.025529] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4e169c-86c8-49c3-b56b-c02b7f48c35e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.039266] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f423f0a-8fbc-4082-b457-61e51783820d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.059538] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263a700f-1df8-4b8c-b920-ab0ef133b709 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.079675] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Instance VIF info [] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1227.089649] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1227.089649] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d030e5c-63c8-40f1-abe5-b4936845f15f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.103555] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Created folder: OpenStack in parent group-v4. [ 1227.103948] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Creating folder: Project (9391bffa5b674f728bbc6a25b211ff52). Parent ref: group-v352575. 
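Nova is building a nested folder hierarchy here, OpenStack -> Project (9391bffa5b674f728bbc6a25b211ff52) -> Instances: the first Folder.CreateFolder call lands OpenStack under the datacenter's group-v4, and each subsequent call parents under the moref the previous one returned (group-v352575 above). A sketch of that chain using oslo.vmware's session API; the session object and starting moref are assumptions here:

def create_folder_chain(session, parent_ref, names):
    # Each CreateFolder returns the new folder's moref, which becomes
    # the parent for the next level; a DuplicateName fault means the
    # folder already exists and can simply be reused.
    for name in names:
        parent_ref = session.invoke_api(session.vim, 'CreateFolder',
                                        parent_ref, name=name)
    return parent_ref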
{{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1227.104039] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e1f26c6-6937-4ddf-9816-a9638a9503d7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.115643] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Created folder: Project (9391bffa5b674f728bbc6a25b211ff52) in parent group-v352575. [ 1227.115770] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Creating folder: Instances. Parent ref: group-v352576. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1227.115924] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5782291c-7b63-430a-b54b-4815c18878ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.127471] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Created folder: Instances in parent group-v352576. [ 1227.127471] env[61663]: DEBUG oslo.service.loopingcall [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1227.127471] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1227.127471] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d768350-db1d-4289-81a9-deed09a8ca98 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.150250] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1227.150250] env[61663]: value = "task-1690661" [ 1227.150250] env[61663]: _type = "Task" [ 1227.150250] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.161728] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690661, 'name': CreateVM_Task} progress is 0%. 
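CreateVM_Task returns immediately with a Task moref; the "Waiting for the task ... progress is 0%" records above are oslo.vmware polling that task until it reaches a terminal state. A bare-bones equivalent with placeholder credentials and an assumed config spec (nothing below is copied from this deployment):

from oslo_vmware import api

# Placeholder endpoint and credentials, for illustration only.
session = api.VMwareAPISession('vc.example.test', 'admin', 'secret',
                               api_retry_count=3, task_poll_interval=0.5)

def create_vm_and_wait(folder_ref, config_spec, pool_ref):
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=pool_ref)
    # wait_for_task polls every task_poll_interval seconds, logging the
    # progress records seen above, and raises if the task errors out.
    return session.wait_for_task(task)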
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.371091] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquiring lock "2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.371091] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Lock "2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.405573] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1227.485113] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.485417] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.486958] env[61663]: INFO nova.compute.claims [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1227.621459] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154e664d-f373-47e5-81d1-3042125be13a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.637797] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2365341-7f55-4c99-acdb-f5b3ffb5c73a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.682826] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d39cff8-4bb2-4258-861a-3f251f13af50 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.694151] env[61663]: DEBUG oslo_service.periodic_task [None
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.698883] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690661, 'name': CreateVM_Task, 'duration_secs': 0.249683} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.701085] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1227.702592] env[61663]: DEBUG oslo_vmware.service [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-502523a5-9b5e-40ee-b125-e71eec451bc0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.707681] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.711137] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1227.711316] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.712156] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1227.714531] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10faba0d-dd69-4d04-b499-574690c9ee84 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.719445] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade52d29-bbac-4eaa-a663-02deba7ebbb7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.726088] env[61663]: DEBUG oslo_vmware.api [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Waiting for the task: (returnval){ [ 1227.726088] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523a39c1-582e-00ae-fbf9-35924832b3b9" [ 
1227.726088] env[61663]: _type = "Task" [ 1227.726088] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.740404] env[61663]: DEBUG nova.compute.provider_tree [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1227.751476] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1227.752146] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1227.752676] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1227.753067] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.753935] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1227.754393] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0869eef7-08f2-4bd1-a0ff-fa85ba973fe5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.760033] env[61663]: DEBUG nova.scheduler.client.report [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1227.781856] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1227.781856] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1227.782755] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3075c3b-1c32-4bac-8fb6-b1cc17cfef90 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.788610] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.788610] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1227.790549] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.083s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.790750] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1227.790957] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1227.792514] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f73265-a9c8-4c9c-b503-fdcae1f006ce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.801531] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6390e1dd-30cd-433c-98a1-1a59468d6467 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.807995] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4d8ea40e-0751-41e0-ace4-5c8e6f7eefda {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.817783] env[61663]: DEBUG oslo_vmware.api [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Waiting for the task: (returnval){ [ 1227.817783] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522c5b03-bdb6-caae-49da-7259f4ecceb8" [ 1227.817783] env[61663]: _type = "Task" [ 1227.817783] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.836352] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ccbc49-742c-44a6-b42d-915d832270a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.843183] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1227.844521] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Creating directory with path [datastore1] vmware_temp/8fd37d02-df6f-4b52-af5d-e9af8deee719/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1227.844521] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-121d8b2f-428c-49fd-93f4-226754be5b97 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.848607] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee311573-c605-4458-ad98-308e48f2e737 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.881394] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181301MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1227.881473] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.881663] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.887751] env[61663]: DEBUG nova.compute.utils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Using /dev/sd instead 
of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1227.887751] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Created directory with path [datastore1] vmware_temp/8fd37d02-df6f-4b52-af5d-e9af8deee719/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1227.889984] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Fetch image to [datastore1] vmware_temp/8fd37d02-df6f-4b52-af5d-e9af8deee719/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1227.889984] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/8fd37d02-df6f-4b52-af5d-e9af8deee719/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1227.889984] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1227.889984] env[61663]: DEBUG nova.network.neutron [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1227.894172] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd2aca3-c40f-44d0-a064-68a537073668 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.903037] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba94300b-e529-4614-a266-444f6cb941f7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.919038] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Start building block device mappings for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1227.923470] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c5c2a5-ac49-475d-ba9f-413907cf64f3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.962081] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603f2f52-41b0-4589-b96b-c9cb76ae8da9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.969904] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-071d60bc-c87c-45c2-bc28-33016b955966 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.992079] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7f516750-b7ee-471b-a386-b898aac3985c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1227.992256] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1227.992432] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1227.992572] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1228.002807] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1228.061621] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d18ea20-9e4c-4196-a6fb-ecd47c78946a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.076041] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97b9f31-072d-4264-9d22-3c7d22fb51b8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.082936] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 
tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1228.113218] env[61663]: DEBUG oslo_vmware.rw_handles [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8fd37d02-df6f-4b52-af5d-e9af8deee719/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1228.114083] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0b9295-e386-41a2-b9a4-64b4d526617f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.182081] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d2168c-a8ad-4c89-bac2-d565dd1d43d8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.189159] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1228.189414] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1228.189576] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.189793] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1228.190294] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c 
tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.190402] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1228.190719] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1228.190774] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1228.190945] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1228.191133] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1228.191343] env[61663]: DEBUG nova.virt.hardware [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1228.191866] env[61663]: DEBUG oslo_vmware.rw_handles [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1228.192114] env[61663]: DEBUG oslo_vmware.rw_handles [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8fd37d02-df6f-4b52-af5d-e9af8deee719/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1228.193666] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93c8a67-1dd8-4da7-9190-753398af53ed {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.211277] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c128c24-0cc0-4ae9-a371-a421066753ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.216566] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.230604] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1228.262073] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1228.262073] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.378s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1228.475357] env[61663]: DEBUG nova.policy [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ddf8d0d56c4e49fbad63565de6f5a394', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f4e8a66ac7aa485d98562fbe292efdb6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1229.259875] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.260252] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.691544] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1229.691858] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1230.346372] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquiring lock "667bfd5b-1331-4ff4-93ee-eaa7c7cc4246" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.346372] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Lock "667bfd5b-1331-4ff4-93ee-eaa7c7cc4246" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.367380] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1230.473910] env[61663]: DEBUG nova.network.neutron [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Successfully created port: 5264d767-9664-4859-bf5f-9ac20b39a883 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1230.493621] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquiring lock "29bee3d3-a6d2-43a9-8439-d5b842214cf1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.493864] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Lock "29bee3d3-a6d2-43a9-8439-d5b842214cf1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.498526] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.498526] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.500063] env[61663]: INFO nova.compute.claims [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1230.508872] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1230.600284] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1230.631943] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5d5f9d-28c2-4262-a4f1-c7219b07b4d2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.640227] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a858a3-6628-40fd-ab2d-78940c0c4b6d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.691014] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a32730-2c41-4344-ba0c-987a84a22030 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.694253] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.702190] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c032db-24d2-4c14-8354-560bc7513925 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.718835] env[61663]: DEBUG nova.compute.provider_tree [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.735774] env[61663]: DEBUG nova.scheduler.client.report [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1230.760755] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.262s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1230.761398] env[61663]: DEBUG nova.compute.manager [None 
req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1230.765081] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.165s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1230.769367] env[61663]: INFO nova.compute.claims [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1230.826321] env[61663]: DEBUG nova.compute.utils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1230.827687] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1230.828619] env[61663]: DEBUG nova.network.neutron [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1230.843857] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1230.929062] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba36ea6-d896-4bc9-b009-15624a80bd90 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.933744] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1230.941393] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c35bac-7169-4100-ba79-8f226365d12b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.979067] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3099a007-0592-4fe3-bfe3-4409b826e7f0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.988905] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1230.989802] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1230.989802] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1230.989802] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1230.989802] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1230.989982] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1230.990117] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 
tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1230.990508] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1230.990667] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1230.990837] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1230.991020] env[61663]: DEBUG nova.virt.hardware [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1230.992335] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cc708b-57c9-4988-970a-9130af02b80c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.999554] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1afdb49-4202-4e21-bc05-3d7e41739359 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.013891] env[61663]: DEBUG nova.compute.provider_tree [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.018875] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85063348-1284-48ee-9282-bbf6f0af7ee6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.024548] env[61663]: DEBUG nova.scheduler.client.report [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1231.049435] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.284s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.049930] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1231.091062] env[61663]: DEBUG nova.policy [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e1ab14ab1fd9484bb114cf7e91a0d642', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aac76cd4e64f453e82bbf8a0b80e8c79', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1231.094560] env[61663]: DEBUG nova.compute.utils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1231.099784] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1231.100113] env[61663]: DEBUG nova.network.neutron [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1231.118880] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1231.215900] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1231.241026] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1231.241683] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1231.241893] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1231.242109] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1231.242263] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1231.242498] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1231.242665] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1231.242854] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1231.243045] env[61663]: DEBUG nova.virt.hardware [None 
req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1231.243212] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1231.243387] env[61663]: DEBUG nova.virt.hardware [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1231.244298] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa8735a-0a80-468c-bf5f-c2c8605f117c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.253799] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f028d08a-9c78-4067-9a5a-213c1da0c520 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.549363] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquiring lock "1c36f4af-0222-48d3-ac90-776f7fe807de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.549649] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "1c36f4af-0222-48d3-ac90-776f7fe807de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.563948] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1231.641526] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.641796] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.644942] env[61663]: INFO nova.compute.claims [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1231.692912] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.693965] env[61663]: DEBUG nova.policy [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8145cb13cb6f46b1ba1249b10deb463d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a4d694436144cb49f16e11236ccb006', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1231.843198] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11176d9d-5902-4597-b70d-88a319d7bc89 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.854172] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6dc2e9-b381-42e9-a36b-844883a78c69 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.890628] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa70793-3e62-4c8d-86f9-4e5b35cd5b45 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.903560] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f174547f-802f-4234-9585-a5f35cba875c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.923177] env[61663]: DEBUG nova.compute.provider_tree [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 
tempest-ServerExternalEventsTest-1495957632-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.935535] env[61663]: DEBUG nova.scheduler.client.report [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1231.959276] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.317s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.959954] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1232.011777] env[61663]: DEBUG nova.compute.utils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1232.013085] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1232.013085] env[61663]: DEBUG nova.network.neutron [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1232.029192] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1232.121167] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1232.153156] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1232.153424] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1232.153739] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1232.153954] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1232.154138] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1232.154308] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1232.154634] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1232.154785] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1232.154952] 
env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1232.155421] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1232.155421] env[61663]: DEBUG nova.virt.hardware [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1232.156467] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1296b42f-d22b-43e2-849d-4d9463bcd7ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.165107] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d66278-406f-4c3f-a1ae-61cfc5720c1f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.460592] env[61663]: DEBUG nova.policy [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6376b38384234c2e85587cb304d91dfb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e5a707812754b418cd43ce6f9ff7a33', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1233.002823] env[61663]: DEBUG nova.network.neutron [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Successfully created port: 43b7e5f9-1ccc-4f87-8d13-4aec5abad20a {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1234.109511] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquiring lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.109511] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.126400] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1234.197864] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1234.198007] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1234.199493] env[61663]: INFO nova.compute.claims [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1234.323923] env[61663]: DEBUG nova.network.neutron [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Successfully created port: 921c3035-78ff-4d80-a9e1-092dd4f69aa1 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1234.374110] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c4ae22-4626-4d33-8ebc-367b10c787c6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.386453] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6304fdbe-229c-4e5e-a368-06206cd430ef {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.429650] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5b358e-f65b-4b3b-ba64-f17c14d73bcc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.440692] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dabf8d0-fabd-4e1d-ad6f-89a28826f884 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.461029] env[61663]: DEBUG nova.compute.provider_tree [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Inventory has not changed in ProviderTree 
for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.479154] env[61663]: DEBUG nova.scheduler.client.report [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1234.499860] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.500461] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1234.548412] env[61663]: DEBUG nova.compute.utils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1234.549471] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1234.549632] env[61663]: DEBUG nova.network.neutron [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1234.563899] env[61663]: DEBUG nova.network.neutron [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Successfully updated port: 5264d767-9664-4859-bf5f-9ac20b39a883 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1234.572411] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Start building block device mappings for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1234.583145] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquiring lock "refresh_cache-2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.583145] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquired lock "refresh_cache-2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.583460] env[61663]: DEBUG nova.network.neutron [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1234.660657] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1234.692294] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1234.693024] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1234.693024] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1234.693024] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 
tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1234.693024] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1234.693197] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1234.696034] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1234.696034] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1234.696034] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1234.696034] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1234.696204] env[61663]: DEBUG nova.virt.hardware [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1234.696204] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2637d9-ae87-4e2a-a48b-23cb02dc1fbe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.704123] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ff4fc9-8e28-43b0-a885-0d214f69eb3e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.750833] env[61663]: DEBUG nova.network.neutron [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Successfully created 
port: 1d551999-71af-42b7-a95c-da072221adba {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1234.809109] env[61663]: DEBUG nova.network.neutron [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1234.903957] env[61663]: DEBUG nova.policy [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e6418497d8b64e8a86366fff9f650c28', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c37f4223e0ca447ca7fe8a7e6158e5df', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1235.913155] env[61663]: DEBUG nova.network.neutron [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Updating instance_info_cache with network_info: [{"id": "5264d767-9664-4859-bf5f-9ac20b39a883", "address": "fa:16:3e:bd:37:6b", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.54", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5264d767-96", "ovs_interfaceid": "5264d767-9664-4859-bf5f-9ac20b39a883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.932177] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Releasing lock "refresh_cache-2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.934016] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Instance network_info: |[{"id": "5264d767-9664-4859-bf5f-9ac20b39a883", "address": 
"fa:16:3e:bd:37:6b", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.54", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5264d767-96", "ovs_interfaceid": "5264d767-9664-4859-bf5f-9ac20b39a883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1235.934137] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:37:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5264d767-9664-4859-bf5f-9ac20b39a883', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1235.946666] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Creating folder: Project (f4e8a66ac7aa485d98562fbe292efdb6). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1235.946666] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17ae3d37-ff1f-48b7-9691-4f665c1a10ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.959042] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Created folder: Project (f4e8a66ac7aa485d98562fbe292efdb6) in parent group-v352575. [ 1235.959430] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Creating folder: Instances. Parent ref: group-v352579. 
[ 1235.960444] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd9ac285-cc06-4dbb-ae7a-17ad19366f56 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1235.972043] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Created folder: Instances in parent group-v352579.
[ 1235.972043] env[61663]: DEBUG oslo.service.loopingcall [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1235.972043] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1235.972043] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb50b0f7-cd1c-4bb4-bfb0-214d351f07a5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1235.998941] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1235.998941] env[61663]: value = "task-1690664"
[ 1235.998941] env[61663]: _type = "Task"
[ 1235.998941] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
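The "Waiting for the task ... to complete" entry above, followed by the "progress is 0%" and "completed successfully ... duration_secs" entries below, is oslo.vmware's task-polling loop: the session repeatedly re-reads the vCenter task until it reaches a terminal state. A minimal emulation of that pattern follows, using a fake task object and plain time.sleep; the FakeTask class and wait_for_task function here are illustrative, not the oslo.vmware implementation (which wraps the poll in a loopingcall on session.wait_for_task).

```python
# Sketch of the poll-until-terminal-state loop behind wait_for_task.
import time

class FakeTask:
    """Stands in for a vCenter task such as task-1690664 (CreateVM_Task)."""
    def __init__(self, ticks_until_done: int):
        self._ticks = ticks_until_done
        self.progress = 0
        self.state = "running"

    def refresh(self) -> None:
        # Re-reading task.info in the real API; here we just advance state.
        self._ticks -= 1
        self.progress = min(100, self.progress + 50)
        if self._ticks <= 0:
            self.state = "success"

def wait_for_task(task: FakeTask, poll_interval: float = 0.5) -> None:
    start = time.monotonic()
    while True:
        task.refresh()
        if task.state == "success":
            print(f"completed successfully, "
                  f"duration_secs={time.monotonic() - start:.6f}")
            return
        if task.state == "error":
            raise RuntimeError("task failed")
        print(f"progress is {task.progress}%")
        time.sleep(poll_interval)

wait_for_task(FakeTask(ticks_until_done=2))
```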
[ 1236.003839] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690664, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1236.288702] env[61663]: DEBUG nova.compute.manager [req-7d0f929a-c559-4a28-98f4-14c09bae3e07 req-f3585eef-fbba-44b4-a69f-e6c50c079fc0 service nova] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Received event network-vif-plugged-5264d767-9664-4859-bf5f-9ac20b39a883 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1236.289012] env[61663]: DEBUG oslo_concurrency.lockutils [req-7d0f929a-c559-4a28-98f4-14c09bae3e07 req-f3585eef-fbba-44b4-a69f-e6c50c079fc0 service nova] Acquiring lock "2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1236.289127] env[61663]: DEBUG oslo_concurrency.lockutils [req-7d0f929a-c559-4a28-98f4-14c09bae3e07 req-f3585eef-fbba-44b4-a69f-e6c50c079fc0 service nova] Lock "2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1236.289293] env[61663]: DEBUG oslo_concurrency.lockutils [req-7d0f929a-c559-4a28-98f4-14c09bae3e07 req-f3585eef-fbba-44b4-a69f-e6c50c079fc0 service nova] Lock "2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1236.289482] env[61663]: DEBUG nova.compute.manager [req-7d0f929a-c559-4a28-98f4-14c09bae3e07 req-f3585eef-fbba-44b4-a69f-e6c50c079fc0 service nova] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] No waiting events found dispatching network-vif-plugged-5264d767-9664-4859-bf5f-9ac20b39a883 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1236.289643] env[61663]: WARNING nova.compute.manager [req-7d0f929a-c559-4a28-98f4-14c09bae3e07 req-f3585eef-fbba-44b4-a69f-e6c50c079fc0 service nova] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Received unexpected event network-vif-plugged-5264d767-9664-4859-bf5f-9ac20b39a883 for instance with vm_state building and task_state spawning.
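The WARNING just above is the normal outcome when Neutron's network-vif-plugged notification arrives before the compute manager has registered a waiter for it. A minimal sketch of that pop-or-warn bookkeeping follows; the InstanceEvents class here is a heavily simplified, hypothetical stand-in for nova.compute.manager's event machinery, not its real implementation.

```python
# Sketch: per-instance event waiters keyed by "<event-name>-<tag>"; an
# incoming event either completes a registered waiter or is logged as
# unexpected, exactly the branch taken in the log above.
import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger(__name__)

class InstanceEvents:
    def __init__(self):
        # {instance_uuid: {event_key: waiter}}
        self._events: dict[str, dict[str, object]] = {}

    def prepare_for_event(self, instance_uuid: str, event_key: str) -> None:
        self._events.setdefault(instance_uuid, {})[event_key] = object()

    def pop_event(self, instance_uuid: str, event_key: str):
        return self._events.get(instance_uuid, {}).pop(event_key, None)

def receive_event(events: InstanceEvents, instance_uuid: str,
                  event_key: str, vm_state: str, task_state: str) -> None:
    waiter = events.pop_event(instance_uuid, event_key)
    if waiter is None:
        LOG.warning("Received unexpected event %s for instance with "
                    "vm_state %s and task_state %s.",
                    event_key, vm_state, task_state)
    else:
        LOG.debug("Processing event %s", event_key)

events = InstanceEvents()
receive_event(events, "2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd",
              "network-vif-plugged-5264d767-9664-4859-bf5f-9ac20b39a883",
              vm_state="building", task_state="spawning")
```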
[ 1236.510074] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690664, 'name': CreateVM_Task, 'duration_secs': 0.355284} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1236.512038] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1236.547691] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1236.547825] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1236.548527] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1236.549073] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74f5660a-8476-469b-909b-3f6c11f70249 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1236.556829] env[61663]: DEBUG oslo_vmware.api [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Waiting for the task: (returnval){
[ 1236.556829] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c43f1a-4752-7a4a-ee32-13d6a3816347"
[ 1236.556829] env[61663]: _type = "Task"
[ 1236.556829] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1236.568917] env[61663]: DEBUG oslo_vmware.api [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c43f1a-4752-7a4a-ee32-13d6a3816347, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.069532] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1237.070689] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1237.070973] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.101526] env[61663]: DEBUG nova.network.neutron [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Successfully created port: 475823d7-4a8b-4e5b-aacc-52ca3aaab5b8 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1237.225235] env[61663]: DEBUG nova.network.neutron [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Successfully updated port: 43b7e5f9-1ccc-4f87-8d13-4aec5abad20a {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1237.245774] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquiring lock "refresh_cache-667bfd5b-1331-4ff4-93ee-eaa7c7cc4246" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.245774] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquired lock "refresh_cache-667bfd5b-1331-4ff4-93ee-eaa7c7cc4246" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.245774] env[61663]: DEBUG nova.network.neutron [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1237.416529] env[61663]: DEBUG nova.network.neutron [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 
tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1237.776465] env[61663]: DEBUG nova.network.neutron [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Successfully updated port: 921c3035-78ff-4d80-a9e1-092dd4f69aa1 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1237.794070] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquiring lock "refresh_cache-29bee3d3-a6d2-43a9-8439-d5b842214cf1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1237.794492] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquired lock "refresh_cache-29bee3d3-a6d2-43a9-8439-d5b842214cf1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.794492] env[61663]: DEBUG nova.network.neutron [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1237.989170] env[61663]: DEBUG nova.network.neutron [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Successfully updated port: 1d551999-71af-42b7-a95c-da072221adba {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1238.007347] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquiring lock "refresh_cache-1c36f4af-0222-48d3-ac90-776f7fe807de" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1238.007678] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquired lock "refresh_cache-1c36f4af-0222-48d3-ac90-776f7fe807de" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.007678] env[61663]: DEBUG nova.network.neutron [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1238.039536] env[61663]: DEBUG nova.network.neutron [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Instance cache missing network 
info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1238.274893] env[61663]: DEBUG nova.network.neutron [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1238.497453] env[61663]: DEBUG nova.network.neutron [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Updating instance_info_cache with network_info: [{"id": "43b7e5f9-1ccc-4f87-8d13-4aec5abad20a", "address": "fa:16:3e:44:f4:c0", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43b7e5f9-1c", "ovs_interfaceid": "43b7e5f9-1ccc-4f87-8d13-4aec5abad20a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.515099] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Releasing lock "refresh_cache-667bfd5b-1331-4ff4-93ee-eaa7c7cc4246" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1238.515427] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Instance network_info: |[{"id": "43b7e5f9-1ccc-4f87-8d13-4aec5abad20a", "address": "fa:16:3e:44:f4:c0", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap43b7e5f9-1c", "ovs_interfaceid": "43b7e5f9-1ccc-4f87-8d13-4aec5abad20a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1238.515843] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:f4:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43b7e5f9-1ccc-4f87-8d13-4aec5abad20a', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1238.528378] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Creating folder: Project (aac76cd4e64f453e82bbf8a0b80e8c79). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1238.528378] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62997a4b-0e81-4504-9940-0b1d09314077 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.539345] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Created folder: Project (aac76cd4e64f453e82bbf8a0b80e8c79) in parent group-v352575. [ 1238.542019] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Creating folder: Instances. Parent ref: group-v352582. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1238.542019] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c8c337e2-ab6f-4345-902a-39e9b5ac79d4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.553941] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Created folder: Instances in parent group-v352582. [ 1238.553941] env[61663]: DEBUG oslo.service.loopingcall [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1238.553941] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1238.554334] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d859197a-d527-4e3b-9ece-06bb31513110 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.583891] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1238.583891] env[61663]: value = "task-1690667" [ 1238.583891] env[61663]: _type = "Task" [ 1238.583891] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.596893] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690667, 'name': CreateVM_Task} progress is 6%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.098176] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690667, 'name': CreateVM_Task, 'duration_secs': 0.337251} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.099227] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1239.100189] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.100351] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.100680] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1239.101091] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbc0f8f6-0f5b-4833-b677-0c9b882d5b8c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.106460] env[61663]: DEBUG oslo_vmware.api [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Waiting for the task: (returnval){ [ 1239.106460] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524c1ba0-094f-dea9-9c6e-f52e36037016" [ 1239.106460] env[61663]: _type = "Task" [ 1239.106460] 
env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.119217] env[61663]: DEBUG oslo_vmware.api [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524c1ba0-094f-dea9-9c6e-f52e36037016, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.466968] env[61663]: DEBUG nova.network.neutron [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Updating instance_info_cache with network_info: [{"id": "921c3035-78ff-4d80-a9e1-092dd4f69aa1", "address": "fa:16:3e:52:e7:52", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap921c3035-78", "ovs_interfaceid": "921c3035-78ff-4d80-a9e1-092dd4f69aa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.483837] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Releasing lock "refresh_cache-29bee3d3-a6d2-43a9-8439-d5b842214cf1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.484203] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Instance network_info: |[{"id": "921c3035-78ff-4d80-a9e1-092dd4f69aa1", "address": "fa:16:3e:52:e7:52", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", 
"external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap921c3035-78", "ovs_interfaceid": "921c3035-78ff-4d80-a9e1-092dd4f69aa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1239.485272] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:e7:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '921c3035-78ff-4d80-a9e1-092dd4f69aa1', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1239.494843] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Creating folder: Project (1a4d694436144cb49f16e11236ccb006). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1239.497855] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-82e3e23d-7bc5-4396-b995-5e29116d8dbe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.514068] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Created folder: Project (1a4d694436144cb49f16e11236ccb006) in parent group-v352575. [ 1239.515945] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Creating folder: Instances. Parent ref: group-v352585. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1239.515945] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c532d727-b42f-4818-ad8b-02aec3e3e48f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.529871] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Created folder: Instances in parent group-v352585. [ 1239.530203] env[61663]: DEBUG oslo.service.loopingcall [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1239.530444] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1239.530691] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5f1d97a-9c07-482b-86df-babbd0c858ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.556019] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1239.556019] env[61663]: value = "task-1690670" [ 1239.556019] env[61663]: _type = "Task" [ 1239.556019] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.564861] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690670, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.617566] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.618091] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1239.618178] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.660877] env[61663]: DEBUG nova.network.neutron [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Updating instance_info_cache with network_info: [{"id": "1d551999-71af-42b7-a95c-da072221adba", "address": "fa:16:3e:96:7b:78", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", 
"segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d551999-71", "ovs_interfaceid": "1d551999-71af-42b7-a95c-da072221adba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.683863] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Releasing lock "refresh_cache-1c36f4af-0222-48d3-ac90-776f7fe807de" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1239.684304] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Instance network_info: |[{"id": "1d551999-71af-42b7-a95c-da072221adba", "address": "fa:16:3e:96:7b:78", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d551999-71", "ovs_interfaceid": "1d551999-71af-42b7-a95c-da072221adba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1239.685288] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:7b:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d551999-71af-42b7-a95c-da072221adba', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1239.696968] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Creating folder: Project (7e5a707812754b418cd43ce6f9ff7a33). Parent ref: group-v352575. 
{{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1239.697878] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2f078ec-4153-4713-a63a-03c2c52e919d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.712938] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Created folder: Project (7e5a707812754b418cd43ce6f9ff7a33) in parent group-v352575. [ 1239.712938] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Creating folder: Instances. Parent ref: group-v352588. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1239.713271] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b99134d-75ea-41e8-9d94-5cd2024f84dd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.724131] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Created folder: Instances in parent group-v352588. [ 1239.724422] env[61663]: DEBUG oslo.service.loopingcall [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1239.724646] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1239.724872] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be291fd3-f800-49db-a294-0cf439af348d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.750032] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1239.750032] env[61663]: value = "task-1690673" [ 1239.750032] env[61663]: _type = "Task" [ 1239.750032] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.758378] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690673, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.067145] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690670, 'name': CreateVM_Task, 'duration_secs': 0.33246} completed successfully. 
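The records above show the vSphere spawn pattern end to end: translate the port into VIF info, create the Project/Instances folders, invoke Folder.CreateVM_Task, then poll the returned task ("progress is 0%" ... "completed successfully"). A minimal sketch of that invoke-and-wait loop, assuming oslo.vmware's public VMwareAPISession API; the host, credentials, and the folder/config-spec/resource-pool references are placeholders for values obtained elsewhere:

    from oslo_vmware import api as vmware_api

    def create_vm(session, folder_ref, config_spec, respool_ref):
        # Folder.CreateVM_Task is asynchronous: invoke_api() returns a Task
        # managed-object reference immediately.
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=respool_ref)
        # wait_for_task() polls TaskInfo at task_poll_interval (the repeated
        # "progress is 0%" lines above), raises if the task errors, and
        # returns the final TaskInfo; .result is the new VM's moref.
        return session.wait_for_task(task).result

    # Hypothetical session setup; endpoint and credentials are placeholders.
    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)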
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.068117] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1240.068612] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.069141] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.070103] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1240.070103] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4faad4b-adcf-4554-a8dd-ccd16d7893e7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.076903] env[61663]: DEBUG oslo_vmware.api [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Waiting for the task: (returnval){ [ 1240.076903] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52df36f6-9d09-c009-e64f-fb933843cada" [ 1240.076903] env[61663]: _type = "Task" [ 1240.076903] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.093205] env[61663]: DEBUG oslo_vmware.api [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52df36f6-9d09-c009-e64f-fb933843cada, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.262773] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690673, 'name': CreateVM_Task, 'duration_secs': 0.294118} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.262972] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1240.263709] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.392580] env[61663]: DEBUG nova.network.neutron [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Successfully updated port: 475823d7-4a8b-4e5b-aacc-52ca3aaab5b8 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1240.413348] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquiring lock "refresh_cache-5e748c4b-03c5-4a88-a4ed-27093f2aef47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.413889] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquired lock "refresh_cache-5e748c4b-03c5-4a88-a4ed-27093f2aef47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.414102] env[61663]: DEBUG nova.network.neutron [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1240.543480] env[61663]: DEBUG nova.network.neutron [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1240.606311] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.606579] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1240.606785] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.606997] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1240.607304] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1240.607855] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b74e222-0c0c-4c77-92a7-7c1708035288 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.621542] env[61663]: DEBUG oslo_vmware.api [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Waiting for the task: (returnval){ [ 1240.621542] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ac30df-00f4-d4dc-3a47-108b7cf744fb" [ 1240.621542] env[61663]: _type = "Task" [ 1240.621542] env[61663]: } to complete. 
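Interleaved with the spawns is the image-cache locking pattern: every worker serializes on a lock named for the datastore cache path, and the paired "Acquired lock" / "Acquired external semaphore" lines show a file-based inter-process lock layered over the in-process one. A minimal sketch with oslo.concurrency, assuming an illustrative lock_path and a hypothetical stand-in for the cache-fill work:

    from oslo_concurrency import lockutils

    CACHE_LOCK = ('[datastore1] devstack-image-cache_base/'
                  '362c8152-fcd0-4f43-acbf-09a2dc376cb2')  # name taken from the log

    def fetch_image_if_missing():
        pass  # hypothetical stand-in for nova's cache-fill step

    # external=True adds a file lock (lockutils sanitizes path separators in
    # the name) over the in-process semaphore, so multiple workers on the
    # same host serialize too; this produces the Acquiring/Acquired/Releasing
    # triples seen above.
    with lockutils.lock(CACHE_LOCK, external=True, lock_path='/tmp'):
        fetch_image_if_missing()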
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.639533] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1240.640214] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1240.640476] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1240.684284] env[61663]: DEBUG nova.compute.manager [req-ea5a9949-0aba-4179-9818-6113df6c21d6 req-5b810f17-54d8-45a0-a6f1-84a4e0e80c1b service nova] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Received event network-vif-plugged-921c3035-78ff-4d80-a9e1-092dd4f69aa1 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1240.688518] env[61663]: DEBUG oslo_concurrency.lockutils [req-ea5a9949-0aba-4179-9818-6113df6c21d6 req-5b810f17-54d8-45a0-a6f1-84a4e0e80c1b service nova] Acquiring lock "29bee3d3-a6d2-43a9-8439-d5b842214cf1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1240.688790] env[61663]: DEBUG oslo_concurrency.lockutils [req-ea5a9949-0aba-4179-9818-6113df6c21d6 req-5b810f17-54d8-45a0-a6f1-84a4e0e80c1b service nova] Lock "29bee3d3-a6d2-43a9-8439-d5b842214cf1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.004s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1240.688967] env[61663]: DEBUG oslo_concurrency.lockutils [req-ea5a9949-0aba-4179-9818-6113df6c21d6 req-5b810f17-54d8-45a0-a6f1-84a4e0e80c1b service nova] Lock "29bee3d3-a6d2-43a9-8439-d5b842214cf1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1240.689167] env[61663]: DEBUG nova.compute.manager [req-ea5a9949-0aba-4179-9818-6113df6c21d6 req-5b810f17-54d8-45a0-a6f1-84a4e0e80c1b service nova] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] No waiting events found dispatching network-vif-plugged-921c3035-78ff-4d80-a9e1-092dd4f69aa1 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1240.689596] env[61663]: WARNING nova.compute.manager [req-ea5a9949-0aba-4179-9818-6113df6c21d6 req-5b810f17-54d8-45a0-a6f1-84a4e0e80c1b service nova] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Received unexpected event 
network-vif-plugged-921c3035-78ff-4d80-a9e1-092dd4f69aa1 for instance with vm_state building and task_state spawning. [ 1241.185620] env[61663]: DEBUG nova.compute.manager [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Received event network-changed-5264d767-9664-4859-bf5f-9ac20b39a883 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1241.185830] env[61663]: DEBUG nova.compute.manager [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Refreshing instance network info cache due to event network-changed-5264d767-9664-4859-bf5f-9ac20b39a883. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1241.186057] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] Acquiring lock "refresh_cache-2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.186219] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] Acquired lock "refresh_cache-2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.186390] env[61663]: DEBUG nova.network.neutron [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Refreshing network info cache for port 5264d767-9664-4859-bf5f-9ac20b39a883 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1241.238312] env[61663]: DEBUG nova.network.neutron [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Updating instance_info_cache with network_info: [{"id": "475823d7-4a8b-4e5b-aacc-52ca3aaab5b8", "address": "fa:16:3e:64:03:33", "network": {"id": "8c9dcdfc-a888-4979-a0da-9e95d4b9ea20", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1438693566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c37f4223e0ca447ca7fe8a7e6158e5df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap475823d7-4a", "ovs_interfaceid": "475823d7-4a8b-4e5b-aacc-52ca3aaab5b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.256024] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Releasing lock "refresh_cache-5e748c4b-03c5-4a88-a4ed-27093f2aef47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.256804] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Instance network_info: |[{"id": "475823d7-4a8b-4e5b-aacc-52ca3aaab5b8", "address": "fa:16:3e:64:03:33", "network": {"id": "8c9dcdfc-a888-4979-a0da-9e95d4b9ea20", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1438693566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c37f4223e0ca447ca7fe8a7e6158e5df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap475823d7-4a", "ovs_interfaceid": "475823d7-4a8b-4e5b-aacc-52ca3aaab5b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1241.257507] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:03:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa410d21-2141-45bb-8d0b-16c77304605f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '475823d7-4a8b-4e5b-aacc-52ca3aaab5b8', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1241.268699] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Creating folder: Project (c37f4223e0ca447ca7fe8a7e6158e5df). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1241.268699] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d9b7af8-41ee-46a8-afb6-ccc4db031865 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.282635] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Created folder: Project (c37f4223e0ca447ca7fe8a7e6158e5df) in parent group-v352575. 
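Each spawn logs the same translation step: the Neutron-side network_info entry (the large JSON dumps) is reduced to the driver-level "Instance VIF info" dict. The field mapping is visible directly by comparing the two dumps; the following is a sketch of that mapping, not nova's actual code:

    def vif_info_from_network_info(vif):
        # Mirrors the mapping visible in the log: bridge -> network_name,
        # port MAC -> mac_address, NSX logical-switch id -> opaque network
        # ref, Neutron port id -> iface_id.
        return {
            'network_name': vif['network']['bridge'],          # 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': 'vmxnet3',
        }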
[ 1241.282722] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Creating folder: Instances. Parent ref: group-v352591. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1241.283394] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d35c3b2-c168-4334-b3da-4c48eee14575 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.295936] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Created folder: Instances in parent group-v352591. [ 1241.296214] env[61663]: DEBUG oslo.service.loopingcall [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1241.296407] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1241.296608] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-595c7e2c-ca94-4825-a4dc-c2b8099802f2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.318240] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1241.318240] env[61663]: value = "task-1690676" [ 1241.318240] env[61663]: _type = "Task" [ 1241.318240] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.328494] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690676, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.819486] env[61663]: DEBUG nova.network.neutron [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Updated VIF entry in instance network info cache for port 5264d767-9664-4859-bf5f-9ac20b39a883. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1241.819911] env[61663]: DEBUG nova.network.neutron [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Updating instance_info_cache with network_info: [{"id": "5264d767-9664-4859-bf5f-9ac20b39a883", "address": "fa:16:3e:bd:37:6b", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.54", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5264d767-96", "ovs_interfaceid": "5264d767-9664-4859-bf5f-9ac20b39a883", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1241.847295] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690676, 'name': CreateVM_Task, 'duration_secs': 0.485001} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.847458] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1241.850307] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.850307] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.850307] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1241.850307] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e28dd026-887b-41aa-b964-69ac1e2b9035 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.855421] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] Releasing lock "refresh_cache-2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.856484] env[61663]: DEBUG nova.compute.manager [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Received event network-vif-plugged-43b7e5f9-1ccc-4f87-8d13-4aec5abad20a {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1241.856484] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] Acquiring lock "667bfd5b-1331-4ff4-93ee-eaa7c7cc4246-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1241.856484] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] Lock "667bfd5b-1331-4ff4-93ee-eaa7c7cc4246-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1241.857493] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] Lock "667bfd5b-1331-4ff4-93ee-eaa7c7cc4246-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1241.857493] env[61663]: DEBUG nova.compute.manager [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] No waiting events found dispatching network-vif-plugged-43b7e5f9-1ccc-4f87-8d13-4aec5abad20a {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1241.857493] env[61663]: WARNING nova.compute.manager [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Received unexpected event network-vif-plugged-43b7e5f9-1ccc-4f87-8d13-4aec5abad20a for instance with vm_state building and task_state spawning. [ 1241.857637] env[61663]: DEBUG nova.compute.manager [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Received event network-changed-43b7e5f9-1ccc-4f87-8d13-4aec5abad20a {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1241.859029] env[61663]: DEBUG nova.compute.manager [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Refreshing instance network info cache due to event network-changed-43b7e5f9-1ccc-4f87-8d13-4aec5abad20a. 
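The network-vif-plugged records above trace nova's external-event handshake: Neutron notifies nova-compute that a port came up, the handler pops any waiter registered for that (instance, event) pair, and when none is registered — as here, where the instance is still building and nothing is blocking on the plug yet — it logs the "Received unexpected event" warning and carries on. An illustrative simplification of that waiter registry (stdlib only, not nova's actual classes):

    import threading

    _waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(instance_uuid, event_name):
        # Called by the spawning thread before it blocks on the event.
        ev = threading.Event()
        _waiters[(instance_uuid, event_name)] = ev
        return ev

    def external_instance_event(instance_uuid, event_name):
        # Called when Neutron delivers the event to nova-compute.
        ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is not None:
            ev.set()  # wake the waiting spawn
        else:
            print('WARNING: received unexpected event %s for %s'
                  % (event_name, instance_uuid))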
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1241.859029] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] Acquiring lock "refresh_cache-667bfd5b-1331-4ff4-93ee-eaa7c7cc4246" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1241.859029] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] Acquired lock "refresh_cache-667bfd5b-1331-4ff4-93ee-eaa7c7cc4246" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.859029] env[61663]: DEBUG nova.network.neutron [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Refreshing network info cache for port 43b7e5f9-1ccc-4f87-8d13-4aec5abad20a {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1241.867106] env[61663]: DEBUG oslo_vmware.api [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Waiting for the task: (returnval){ [ 1241.867106] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527e0625-15a5-1b24-dc55-4133d4e05ae2" [ 1241.867106] env[61663]: _type = "Task" [ 1241.867106] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.883329] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1241.883568] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1241.883784] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1242.975880] env[61663]: DEBUG nova.network.neutron [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Updated VIF entry in instance network info cache for port 43b7e5f9-1ccc-4f87-8d13-4aec5abad20a. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1242.976178] env[61663]: DEBUG nova.network.neutron [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Updating instance_info_cache with network_info: [{"id": "43b7e5f9-1ccc-4f87-8d13-4aec5abad20a", "address": "fa:16:3e:44:f4:c0", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.231", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43b7e5f9-1c", "ovs_interfaceid": "43b7e5f9-1ccc-4f87-8d13-4aec5abad20a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.996300] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c10c13c-bc31-472e-ad79-31d26accaf9f req-506b83b8-9add-4f2c-8355-c0c96abea0ee service nova] Releasing lock "refresh_cache-667bfd5b-1331-4ff4-93ee-eaa7c7cc4246" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.893860] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquiring lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1243.894098] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1243.911076] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1244.008883] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.009185] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.010868] env[61663]: INFO nova.compute.claims [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1244.243465] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffa6faf-4241-4b56-bbd9-589e7fe59ea7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.253765] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697673f1-9172-4821-a577-3413400633f7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.293031] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c088254-99bb-4c12-8147-7e779ed7c747 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.302315] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5011c3b7-5a29-4ca3-97dd-b8b67461743f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.316371] env[61663]: DEBUG nova.compute.provider_tree [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.328364] env[61663]: DEBUG nova.scheduler.client.report [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1244.352009] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.353648] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1244.408954] env[61663]: DEBUG nova.compute.utils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1244.411039] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1244.411298] env[61663]: DEBUG nova.network.neutron [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1244.425729] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1244.526980] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Start spawning the instance on the hypervisor. 
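The resource-claim records above also report this node's placement inventory. Usable capacity per resource class is (total - reserved) * allocation_ratio, so the reported figures work out as below:

    # Capacity arithmetic for the inventory reported above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0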
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1244.566904] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1244.566904] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1244.566904] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1244.567198] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1244.567198] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1244.567198] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1244.567198] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1244.567198] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1244.567414] env[61663]: DEBUG nova.virt.hardware [None 
req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1244.567414] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1244.567414] env[61663]: DEBUG nova.virt.hardware [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1244.567666] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f9e03f-30d6-4e3f-b1cc-945b71be7451 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.577729] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed936b8c-d11a-4485-a204-d8fc05e12774 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.683866] env[61663]: DEBUG nova.policy [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ca4c35720aa4f7fa6f2c95f1f1e2f5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '59d9fb219eec43ba81650805a579f52b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1245.462293] env[61663]: DEBUG nova.compute.manager [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Received event network-vif-plugged-1d551999-71af-42b7-a95c-da072221adba {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1245.462538] env[61663]: DEBUG oslo_concurrency.lockutils [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] Acquiring lock "1c36f4af-0222-48d3-ac90-776f7fe807de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.463609] env[61663]: DEBUG oslo_concurrency.lockutils [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] Lock "1c36f4af-0222-48d3-ac90-776f7fe807de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.463609] env[61663]: DEBUG oslo_concurrency.lockutils [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 
req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] Lock "1c36f4af-0222-48d3-ac90-776f7fe807de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.463609] env[61663]: DEBUG nova.compute.manager [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] No waiting events found dispatching network-vif-plugged-1d551999-71af-42b7-a95c-da072221adba {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1245.463609] env[61663]: WARNING nova.compute.manager [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Received unexpected event network-vif-plugged-1d551999-71af-42b7-a95c-da072221adba for instance with vm_state building and task_state spawning. [ 1245.463854] env[61663]: DEBUG nova.compute.manager [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Received event network-changed-921c3035-78ff-4d80-a9e1-092dd4f69aa1 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1245.463854] env[61663]: DEBUG nova.compute.manager [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Refreshing instance network info cache due to event network-changed-921c3035-78ff-4d80-a9e1-092dd4f69aa1. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1245.463854] env[61663]: DEBUG oslo_concurrency.lockutils [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] Acquiring lock "refresh_cache-29bee3d3-a6d2-43a9-8439-d5b842214cf1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1245.463948] env[61663]: DEBUG oslo_concurrency.lockutils [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] Acquired lock "refresh_cache-29bee3d3-a6d2-43a9-8439-d5b842214cf1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.464467] env[61663]: DEBUG nova.network.neutron [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Refreshing network info cache for port 921c3035-78ff-4d80-a9e1-092dd4f69aa1 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1246.169935] env[61663]: DEBUG nova.network.neutron [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Successfully created port: ce1d802a-f0d8-404b-89b7-6a22a35fed90 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1246.257937] env[61663]: DEBUG nova.network.neutron [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Updated VIF entry in instance network info cache for port 921c3035-78ff-4d80-a9e1-092dd4f69aa1. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1246.258325] env[61663]: DEBUG nova.network.neutron [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Updating instance_info_cache with network_info: [{"id": "921c3035-78ff-4d80-a9e1-092dd4f69aa1", "address": "fa:16:3e:52:e7:52", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap921c3035-78", "ovs_interfaceid": "921c3035-78ff-4d80-a9e1-092dd4f69aa1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.285819] env[61663]: DEBUG oslo_concurrency.lockutils [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] Releasing lock "refresh_cache-29bee3d3-a6d2-43a9-8439-d5b842214cf1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1246.286096] env[61663]: DEBUG nova.compute.manager [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Received event network-changed-1d551999-71af-42b7-a95c-da072221adba {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1246.286267] env[61663]: DEBUG nova.compute.manager [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Refreshing instance network info cache due to event network-changed-1d551999-71af-42b7-a95c-da072221adba. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1246.286488] env[61663]: DEBUG oslo_concurrency.lockutils [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] Acquiring lock "refresh_cache-1c36f4af-0222-48d3-ac90-776f7fe807de" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.286646] env[61663]: DEBUG oslo_concurrency.lockutils [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] Acquired lock "refresh_cache-1c36f4af-0222-48d3-ac90-776f7fe807de" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.286813] env[61663]: DEBUG nova.network.neutron [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Refreshing network info cache for port 1d551999-71af-42b7-a95c-da072221adba {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1246.316899] env[61663]: DEBUG nova.compute.manager [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Received event network-vif-plugged-475823d7-4a8b-4e5b-aacc-52ca3aaab5b8 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1246.316899] env[61663]: DEBUG oslo_concurrency.lockutils [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] Acquiring lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.317162] env[61663]: DEBUG oslo_concurrency.lockutils [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] Lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.317216] env[61663]: DEBUG oslo_concurrency.lockutils [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] Lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.317512] env[61663]: DEBUG nova.compute.manager [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] No waiting events found dispatching network-vif-plugged-475823d7-4a8b-4e5b-aacc-52ca3aaab5b8 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1246.317512] env[61663]: WARNING nova.compute.manager [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Received unexpected event network-vif-plugged-475823d7-4a8b-4e5b-aacc-52ca3aaab5b8 for instance with vm_state building and task_state spawning.
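The "-events" lock sequence and the WARNING entries above are the external-event handshake: Neutron notifies nova-compute that a VIF is plugged, and the compute manager pops a registered waiter for that event while holding a per-instance lock. When the event outruns the waiter registration, which is common while vm_state is still building, there is nothing to pop and the event is reported as unexpected, exactly as happens here for instances 1c36f4af and 5e748c4b. The following is a toy model of that pop-or-warn logic, not Nova's implementation; every name in it is illustrative.

    import threading

    _waiters = {}   # (instance_uuid, event_name) -> threading.Event
    _lock = threading.Lock()

    def prepare_for_event(instance_uuid, event_name):
        # The spawning thread must register interest before it blocks.
        with _lock:
            _waiters[(instance_uuid, event_name)] = threading.Event()

    def dispatch_event(instance_uuid, event_name):
        # Pop under the lock, mirroring the "<uuid>-events" lock above;
        # warn when nothing was registered yet.
        with _lock:
            waiter = _waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print('WARNING: unexpected event %s for %s'
                  % (event_name, instance_uuid))
        else:
            waiter.set()   # wakes the thread blocked on waiter.wait()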
[ 1246.318930] env[61663]: DEBUG nova.compute.manager [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Received event network-changed-475823d7-4a8b-4e5b-aacc-52ca3aaab5b8 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1246.318930] env[61663]: DEBUG nova.compute.manager [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Refreshing instance network info cache due to event network-changed-475823d7-4a8b-4e5b-aacc-52ca3aaab5b8. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1246.318930] env[61663]: DEBUG oslo_concurrency.lockutils [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] Acquiring lock "refresh_cache-5e748c4b-03c5-4a88-a4ed-27093f2aef47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1246.320613] env[61663]: DEBUG oslo_concurrency.lockutils [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] Acquired lock "refresh_cache-5e748c4b-03c5-4a88-a4ed-27093f2aef47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.320811] env[61663]: DEBUG nova.network.neutron [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Refreshing network info cache for port 475823d7-4a8b-4e5b-aacc-52ca3aaab5b8 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1246.793263] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "790791ee-4e6c-4116-8ade-ba61f55ebd4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.793595] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "790791ee-4e6c-4116-8ade-ba61f55ebd4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.816976] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Starting instance...
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1246.906233] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.906503] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.908716] env[61663]: INFO nova.compute.claims [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1247.117708] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb84163b-7a50-4126-b30d-6b53ae12fc36 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.126598] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c444bc-a2ce-497f-b54e-6d8f7abe8a23 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.163655] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba3f077-bed7-48c1-bdd0-4e17e8ec7b7e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.173601] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd33684-6aac-46a3-ab19-69a283a8c20d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.187256] env[61663]: DEBUG nova.compute.provider_tree [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1247.198677] env[61663]: DEBUG nova.scheduler.client.report [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1247.220551] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.314s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.221077] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1247.266725] env[61663]: DEBUG nova.compute.utils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1247.272307] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1247.272307] env[61663]: DEBUG nova.network.neutron [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1247.287777] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1247.383252] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1247.404254] env[61663]: DEBUG nova.network.neutron [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Updated VIF entry in instance network info cache for port 1d551999-71af-42b7-a95c-da072221adba. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1247.407015] env[61663]: DEBUG nova.network.neutron [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Updating instance_info_cache with network_info: [{"id": "1d551999-71af-42b7-a95c-da072221adba", "address": "fa:16:3e:96:7b:78", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d551999-71", "ovs_interfaceid": "1d551999-71af-42b7-a95c-da072221adba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.415458] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1247.415631] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1247.415749] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1247.415928] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1247.416481] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1247.416687] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1247.416907] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1247.417190] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1247.417835] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1247.417835] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1247.417835] env[61663]: DEBUG nova.virt.hardware [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1247.418766] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a259313d-2953-498b-a61a-e7ea80977b35 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.422809] env[61663]: DEBUG oslo_concurrency.lockutils [req-f83ecaf3-cf6c-4062-ab0a-dcfc3d535092 req-0cc9218b-7795-4ad9-9807-f5cc43b2436f service nova] Releasing lock "refresh_cache-1c36f4af-0222-48d3-ac90-776f7fe807de" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1247.431919] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be51e03c-f96e-48a6-9b6b-e6320f3c9e28 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.542677] env[61663]: DEBUG nova.policy [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Policy check for 
network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9ccd477bd26463580897be0257e4398', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35fd7b809f894a18b89bcf60fa56eac1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1247.759452] env[61663]: DEBUG nova.network.neutron [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Updated VIF entry in instance network info cache for port 475823d7-4a8b-4e5b-aacc-52ca3aaab5b8. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1247.759816] env[61663]: DEBUG nova.network.neutron [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Updating instance_info_cache with network_info: [{"id": "475823d7-4a8b-4e5b-aacc-52ca3aaab5b8", "address": "fa:16:3e:64:03:33", "network": {"id": "8c9dcdfc-a888-4979-a0da-9e95d4b9ea20", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1438693566-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c37f4223e0ca447ca7fe8a7e6158e5df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa410d21-2141-45bb-8d0b-16c77304605f", "external-id": "nsx-vlan-transportzone-886", "segmentation_id": 886, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap475823d7-4a", "ovs_interfaceid": "475823d7-4a8b-4e5b-aacc-52ca3aaab5b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.774068] env[61663]: DEBUG oslo_concurrency.lockutils [req-c2fcbcd3-4939-40cb-ad27-794ce8fa475b req-99b03224-8684-41e2-b818-6d7563ece962 service nova] Releasing lock "refresh_cache-5e748c4b-03c5-4a88-a4ed-27093f2aef47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.699730] env[61663]: DEBUG nova.network.neutron [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Successfully created port: 7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1249.216684] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquiring lock "f9a675b6-e76d-492b-ac34-3c7b10553fca" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663)
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.216845] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "f9a675b6-e76d-492b-ac34-3c7b10553fca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.236751] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1249.297355] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.297607] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.299227] env[61663]: INFO nova.compute.claims [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1249.420463] env[61663]: DEBUG nova.network.neutron [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Successfully updated port: ce1d802a-f0d8-404b-89b7-6a22a35fed90 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1249.437844] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquiring lock "refresh_cache-71509f58-5616-4d6a-9a88-3bfd9d414a0c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1249.437994] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquired lock "refresh_cache-71509f58-5616-4d6a-9a88-3bfd9d414a0c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.438165] env[61663]: DEBUG nova.network.neutron [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Building network info cache for instance
{{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1249.520035] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3465602-0ba9-44f9-91e1-d2a8c5ae0716 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.529010] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017ff85e-ca7e-415c-b44d-2a3c2110c490 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.568505] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e765552f-079f-48d9-84a1-cf5592929d73 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.576548] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb648529-85d0-4f31-9f34-fdb7ed6c718b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.591362] env[61663]: DEBUG nova.compute.provider_tree [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1249.602543] env[61663]: DEBUG nova.scheduler.client.report [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1249.624824] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1249.625346] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1249.675554] env[61663]: DEBUG nova.compute.utils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1249.680149] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1249.680149] env[61663]: DEBUG nova.network.neutron [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1249.693635] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1249.812748] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1249.818019] env[61663]: DEBUG nova.network.neutron [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1249.854587] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1249.854876] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1249.854958] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.855117] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1249.855272] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.855420] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1249.855634] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1249.855797] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1249.855962] 
env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1249.856142] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1249.856317] env[61663]: DEBUG nova.virt.hardware [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1249.857213] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2c040f-734e-4a3c-9697-1c8654f9f177 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.867912] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d322ad-3b22-43f0-9a2e-b5a4b7c375c1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.039940] env[61663]: DEBUG nova.policy [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0b278776a059466484a5e534f713e3bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e841a35c2fe42e28403f4768875fd9b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1250.947686] env[61663]: DEBUG nova.network.neutron [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Successfully updated port: 7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1250.967450] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "refresh_cache-790791ee-4e6c-4116-8ade-ba61f55ebd4d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.967450] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquired lock "refresh_cache-790791ee-4e6c-4116-8ade-ba61f55ebd4d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.967450] env[61663]: DEBUG nova.network.neutron [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc 
tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1251.117748] env[61663]: DEBUG nova.network.neutron [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1251.273273] env[61663]: DEBUG nova.network.neutron [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Updating instance_info_cache with network_info: [{"id": "ce1d802a-f0d8-404b-89b7-6a22a35fed90", "address": "fa:16:3e:a1:85:50", "network": {"id": "c5fa3ef5-0d4a-47d7-99d9-90879158e4f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-176409592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59d9fb219eec43ba81650805a579f52b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce1d802a-f0", "ovs_interfaceid": "ce1d802a-f0d8-404b-89b7-6a22a35fed90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.288180] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Releasing lock "refresh_cache-71509f58-5616-4d6a-9a88-3bfd9d414a0c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.288485] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Instance network_info: |[{"id": "ce1d802a-f0d8-404b-89b7-6a22a35fed90", "address": "fa:16:3e:a1:85:50", "network": {"id": "c5fa3ef5-0d4a-47d7-99d9-90879158e4f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-176409592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59d9fb219eec43ba81650805a579f52b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce1d802a-f0", "ovs_interfaceid": "ce1d802a-f0d8-404b-89b7-6a22a35fed90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1251.288891] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:85:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f762954-6ca5-4da5-bf0a-5d31c51ec570', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce1d802a-f0d8-404b-89b7-6a22a35fed90', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1251.297941] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Creating folder: Project (59d9fb219eec43ba81650805a579f52b). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1251.298614] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f212b91-2a86-4249-85ba-624cc0fce5f9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.313894] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Created folder: Project (59d9fb219eec43ba81650805a579f52b) in parent group-v352575. [ 1251.314104] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Creating folder: Instances. Parent ref: group-v352594. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1251.314966] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf485348-dc95-4f7f-a358-5a3118e6e744 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.326036] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Created folder: Instances in parent group-v352594. [ 1251.326385] env[61663]: DEBUG oslo.service.loopingcall [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1251.326504] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1251.326807] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-add61e9f-8498-43a3-b65f-4b3eb27e472c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.349776] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1251.349776] env[61663]: value = "task-1690679" [ 1251.349776] env[61663]: _type = "Task" [ 1251.349776] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.361834] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690679, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.587959] env[61663]: DEBUG nova.network.neutron [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Updating instance_info_cache with network_info: [{"id": "7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c", "address": "fa:16:3e:b6:b7:c5", "network": {"id": "b617d364-2c5d-4403-8bfa-d8fc619cea12", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1540080399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fd7b809f894a18b89bcf60fa56eac1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c42264c-fb", "ovs_interfaceid": "7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.601441] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Releasing lock "refresh_cache-790791ee-4e6c-4116-8ade-ba61f55ebd4d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1251.602165] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Instance network_info: |[{"id": "7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c", "address": "fa:16:3e:b6:b7:c5", "network": {"id": "b617d364-2c5d-4403-8bfa-d8fc619cea12", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1540080399-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fd7b809f894a18b89bcf60fa56eac1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c42264c-fb", "ovs_interfaceid": "7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1251.605957] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:b7:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35fcdc55-dc29-451b-ad56-3a03b044dc81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1251.614611] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Creating folder: Project (35fd7b809f894a18b89bcf60fa56eac1). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1251.615259] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-844e9115-1b7c-45b1-89bc-5fb70500521f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.632243] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Created folder: Project (35fd7b809f894a18b89bcf60fa56eac1) in parent group-v352575. [ 1251.632464] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Creating folder: Instances. Parent ref: group-v352597. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1251.632780] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81161526-57f5-44b7-a9ba-02055d005b3f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.642559] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Created folder: Instances in parent group-v352597. 
[ 1251.642832] env[61663]: DEBUG oslo.service.loopingcall [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1251.643363] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1251.643363] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-caf70d46-6fb8-43ab-91c4-19839879f4c9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.674578] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1251.674578] env[61663]: value = "task-1690682" [ 1251.674578] env[61663]: _type = "Task" [ 1251.674578] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.681991] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690682, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.859864] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690679, 'name': CreateVM_Task, 'duration_secs': 0.342116} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.863225] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1251.863579] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1251.864351] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.864351] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1251.864597] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef7319d3-d9ce-4e1e-a137-b1fabe59754e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.869789] env[61663]: DEBUG oslo_vmware.api [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 
tempest-ServerActionsTestJSON-1880419134-project-member] Waiting for the task: (returnval){ [ 1251.869789] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b29912-ad1a-b482-de69-cd6cfc4cca01" [ 1251.869789] env[61663]: _type = "Task" [ 1251.869789] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.884031] env[61663]: DEBUG oslo_vmware.api [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b29912-ad1a-b482-de69-cd6cfc4cca01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.133463] env[61663]: DEBUG nova.network.neutron [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Successfully created port: 417487b1-9e8c-4461-b3b6-a1088a797e16 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1252.181017] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690682, 'name': CreateVM_Task, 'duration_secs': 0.323487} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.181255] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1252.181960] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.379667] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1252.379946] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1252.380038] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.380274] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] 
Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.380555] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1252.380809] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a1db44a-dd32-4caa-9e58-23065a475137 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.385656] env[61663]: DEBUG oslo_vmware.api [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for the task: (returnval){ [ 1252.385656] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52465167-64b6-6f44-df16-aeacc12192c8" [ 1252.385656] env[61663]: _type = "Task" [ 1252.385656] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.396708] env[61663]: DEBUG oslo_vmware.api [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52465167-64b6-6f44-df16-aeacc12192c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.636488] env[61663]: DEBUG nova.compute.manager [req-4d26bf2d-119e-4e4a-8f90-e28563de01bd req-dcc2f03a-d7f1-4aeb-bc27-f8ee50090b01 service nova] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Received event network-vif-plugged-ce1d802a-f0d8-404b-89b7-6a22a35fed90 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1252.636707] env[61663]: DEBUG oslo_concurrency.lockutils [req-4d26bf2d-119e-4e4a-8f90-e28563de01bd req-dcc2f03a-d7f1-4aeb-bc27-f8ee50090b01 service nova] Acquiring lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.636907] env[61663]: DEBUG oslo_concurrency.lockutils [req-4d26bf2d-119e-4e4a-8f90-e28563de01bd req-dcc2f03a-d7f1-4aeb-bc27-f8ee50090b01 service nova] Lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.637083] env[61663]: DEBUG oslo_concurrency.lockutils [req-4d26bf2d-119e-4e4a-8f90-e28563de01bd req-dcc2f03a-d7f1-4aeb-bc27-f8ee50090b01 service nova] Lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.638418] env[61663]: DEBUG nova.compute.manager [req-4d26bf2d-119e-4e4a-8f90-e28563de01bd 
req-dcc2f03a-d7f1-4aeb-bc27-f8ee50090b01 service nova] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] No waiting events found dispatching network-vif-plugged-ce1d802a-f0d8-404b-89b7-6a22a35fed90 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1252.638418] env[61663]: WARNING nova.compute.manager [req-4d26bf2d-119e-4e4a-8f90-e28563de01bd req-dcc2f03a-d7f1-4aeb-bc27-f8ee50090b01 service nova] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Received unexpected event network-vif-plugged-ce1d802a-f0d8-404b-89b7-6a22a35fed90 for instance with vm_state building and task_state spawning. [ 1252.901050] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1252.901314] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1252.901522] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.949525] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.949777] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.968024] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1252.994966] env[61663]: DEBUG nova.compute.manager [req-4c71d212-0288-49fa-b47e-3f5aaf784beb req-e82cdcc7-efc4-4a55-a171-e715dcaba68d service nova] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Received event network-vif-plugged-7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1252.995795] env[61663]: DEBUG oslo_concurrency.lockutils [req-4c71d212-0288-49fa-b47e-3f5aaf784beb req-e82cdcc7-efc4-4a55-a171-e715dcaba68d service nova] Acquiring lock "790791ee-4e6c-4116-8ade-ba61f55ebd4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.995795] env[61663]: DEBUG oslo_concurrency.lockutils [req-4c71d212-0288-49fa-b47e-3f5aaf784beb req-e82cdcc7-efc4-4a55-a171-e715dcaba68d service nova] Lock "790791ee-4e6c-4116-8ade-ba61f55ebd4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.995795] env[61663]: DEBUG oslo_concurrency.lockutils [req-4c71d212-0288-49fa-b47e-3f5aaf784beb req-e82cdcc7-efc4-4a55-a171-e715dcaba68d service nova] Lock "790791ee-4e6c-4116-8ade-ba61f55ebd4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.995795] env[61663]: DEBUG nova.compute.manager [req-4c71d212-0288-49fa-b47e-3f5aaf784beb req-e82cdcc7-efc4-4a55-a171-e715dcaba68d service nova] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] No waiting events found dispatching network-vif-plugged-7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1252.996305] env[61663]: WARNING nova.compute.manager [req-4c71d212-0288-49fa-b47e-3f5aaf784beb req-e82cdcc7-efc4-4a55-a171-e715dcaba68d service nova] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Received unexpected event network-vif-plugged-7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c for instance with vm_state building and task_state spawning. 
[ 1253.048496] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1253.048496] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1253.048834] env[61663]: INFO nova.compute.claims [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1253.317776] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0ea3c2-e54b-4ac9-8e2f-3dde7bff9c30 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.324134] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d4efb8-482f-4df9-8f34-a82dac569157 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.359559] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c54871c-4db4-428e-89f9-1f0266200ff9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.368830] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdcd541-25a1-42d0-810a-7772ec13a8ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.385606] env[61663]: DEBUG nova.compute.provider_tree [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1253.399326] env[61663]: DEBUG nova.scheduler.client.report [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1253.415944] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 
tempest-ServersAdminTestJSON-1275731029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.369s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1253.416512] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1253.464228] env[61663]: DEBUG nova.compute.utils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1253.465936] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1253.465936] env[61663]: DEBUG nova.network.neutron [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1253.479586] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1253.562149] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1253.610535] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1253.611787] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1253.611787] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1253.611787] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1253.611787] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1253.611787] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1253.612174] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1253.612174] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1253.612174] env[61663]: DEBUG nova.virt.hardware [None 
req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1253.612373] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1253.612404] env[61663]: DEBUG nova.virt.hardware [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1253.613644] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912042a5-9d0f-4d93-bc76-568166d64138 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.620693] env[61663]: DEBUG nova.policy [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9ccd477bd26463580897be0257e4398', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35fd7b809f894a18b89bcf60fa56eac1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1253.626685] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759a45e0-043f-49fc-8122-eb852d2a3d30 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.018626] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquiring lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1254.018859] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1254.300424] env[61663]: DEBUG nova.network.neutron [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Successfully updated port: 417487b1-9e8c-4461-b3b6-a1088a797e16 {{(pid=61663) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1254.318037] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquiring lock "refresh_cache-f9a675b6-e76d-492b-ac34-3c7b10553fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.318037] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquired lock "refresh_cache-f9a675b6-e76d-492b-ac34-3c7b10553fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.318037] env[61663]: DEBUG nova.network.neutron [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1254.414231] env[61663]: DEBUG nova.network.neutron [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1254.564965] env[61663]: DEBUG nova.network.neutron [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Successfully created port: 23b625c7-db76-4040-9e08-7b25f2fb9433 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1255.107326] env[61663]: DEBUG nova.network.neutron [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Updating instance_info_cache with network_info: [{"id": "417487b1-9e8c-4461-b3b6-a1088a797e16", "address": "fa:16:3e:bc:05:5d", "network": {"id": "d0bdae79-f7a6-43d5-941d-af47437f9fce", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1059103520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e841a35c2fe42e28403f4768875fd9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5756d009-13ad-4e13-a991-3b5e71830aa5", "external-id": "nsx-vlan-transportzone-608", "segmentation_id": 608, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap417487b1-9e", "ovs_interfaceid": "417487b1-9e8c-4461-b3b6-a1088a797e16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1255.125334] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Releasing lock "refresh_cache-f9a675b6-e76d-492b-ac34-3c7b10553fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.125635] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Instance network_info: |[{"id": "417487b1-9e8c-4461-b3b6-a1088a797e16", "address": "fa:16:3e:bc:05:5d", "network": {"id": "d0bdae79-f7a6-43d5-941d-af47437f9fce", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1059103520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e841a35c2fe42e28403f4768875fd9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5756d009-13ad-4e13-a991-3b5e71830aa5", "external-id": "nsx-vlan-transportzone-608", "segmentation_id": 608, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap417487b1-9e", "ovs_interfaceid": "417487b1-9e8c-4461-b3b6-a1088a797e16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1255.127016] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:05:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5756d009-13ad-4e13-a991-3b5e71830aa5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '417487b1-9e8c-4461-b3b6-a1088a797e16', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1255.135035] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Creating folder: Project (7e841a35c2fe42e28403f4768875fd9b). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1255.136059] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e22a0f4-c35a-49f2-8dce-13a2d368b2e1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.149560] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Created folder: Project (7e841a35c2fe42e28403f4768875fd9b) in parent group-v352575. 
[ 1255.150410] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Creating folder: Instances. Parent ref: group-v352600. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1255.150410] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83877b93-1f24-4197-9b0c-69cf38c16bcc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.164703] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Created folder: Instances in parent group-v352600. [ 1255.164703] env[61663]: DEBUG oslo.service.loopingcall [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1255.164822] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1255.167050] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2558e67-9f36-4f40-b360-a7efb949d443 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.185136] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1255.185136] env[61663]: value = "task-1690685" [ 1255.185136] env[61663]: _type = "Task" [ 1255.185136] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.193647] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690685, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.702115] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690685, 'name': CreateVM_Task, 'duration_secs': 0.363213} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.702115] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1255.702527] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1255.702597] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.702935] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1255.703484] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac5545c8-40f9-49aa-a10a-bacdea5650bc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.710627] env[61663]: DEBUG oslo_vmware.api [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Waiting for the task: (returnval){ [ 1255.710627] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e142a1-9428-0460-1656-6795c8cdaf54" [ 1255.710627] env[61663]: _type = "Task" [ 1255.710627] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.719841] env[61663]: DEBUG oslo_vmware.api [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e142a1-9428-0460-1656-6795c8cdaf54, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.153141] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.153703] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.221579] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1256.221579] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1256.221579] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.405318] env[61663]: DEBUG nova.compute.manager [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Received event network-changed-ce1d802a-f0d8-404b-89b7-6a22a35fed90 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1256.406137] env[61663]: DEBUG nova.compute.manager [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Refreshing instance network info cache due to event network-changed-ce1d802a-f0d8-404b-89b7-6a22a35fed90. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1256.406417] env[61663]: DEBUG oslo_concurrency.lockutils [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] Acquiring lock "refresh_cache-71509f58-5616-4d6a-9a88-3bfd9d414a0c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.406589] env[61663]: DEBUG oslo_concurrency.lockutils [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] Acquired lock "refresh_cache-71509f58-5616-4d6a-9a88-3bfd9d414a0c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.407585] env[61663]: DEBUG nova.network.neutron [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Refreshing network info cache for port ce1d802a-f0d8-404b-89b7-6a22a35fed90 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1256.524668] env[61663]: DEBUG nova.compute.manager [req-6e6286d4-356b-44aa-a5a4-56bce97b2e7f req-b0cc6b17-b967-4f12-9688-406305f325cc service nova] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Received event network-changed-7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1256.527115] env[61663]: DEBUG nova.compute.manager [req-6e6286d4-356b-44aa-a5a4-56bce97b2e7f req-b0cc6b17-b967-4f12-9688-406305f325cc service nova] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Refreshing instance network info cache due to event network-changed-7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1256.527458] env[61663]: DEBUG oslo_concurrency.lockutils [req-6e6286d4-356b-44aa-a5a4-56bce97b2e7f req-b0cc6b17-b967-4f12-9688-406305f325cc service nova] Acquiring lock "refresh_cache-790791ee-4e6c-4116-8ade-ba61f55ebd4d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.528281] env[61663]: DEBUG oslo_concurrency.lockutils [req-6e6286d4-356b-44aa-a5a4-56bce97b2e7f req-b0cc6b17-b967-4f12-9688-406305f325cc service nova] Acquired lock "refresh_cache-790791ee-4e6c-4116-8ade-ba61f55ebd4d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.528281] env[61663]: DEBUG nova.network.neutron [req-6e6286d4-356b-44aa-a5a4-56bce97b2e7f req-b0cc6b17-b967-4f12-9688-406305f325cc service nova] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Refreshing network info cache for port 7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1256.647240] env[61663]: DEBUG nova.network.neutron [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Successfully updated port: 23b625c7-db76-4040-9e08-7b25f2fb9433 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1256.663682] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "refresh_cache-6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" {{(pid=61663) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.664009] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquired lock "refresh_cache-6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.664009] env[61663]: DEBUG nova.network.neutron [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1256.783033] env[61663]: DEBUG nova.network.neutron [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1257.101564] env[61663]: DEBUG nova.network.neutron [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Updating instance_info_cache with network_info: [{"id": "23b625c7-db76-4040-9e08-7b25f2fb9433", "address": "fa:16:3e:d3:b4:7d", "network": {"id": "b617d364-2c5d-4403-8bfa-d8fc619cea12", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1540080399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fd7b809f894a18b89bcf60fa56eac1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23b625c7-db", "ovs_interfaceid": "23b625c7-db76-4040-9e08-7b25f2fb9433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.121703] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Releasing lock "refresh_cache-6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.122023] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Instance network_info: |[{"id": "23b625c7-db76-4040-9e08-7b25f2fb9433", "address": "fa:16:3e:d3:b4:7d", "network": {"id": "b617d364-2c5d-4403-8bfa-d8fc619cea12", 
"bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1540080399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fd7b809f894a18b89bcf60fa56eac1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23b625c7-db", "ovs_interfaceid": "23b625c7-db76-4040-9e08-7b25f2fb9433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1257.122445] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:b4:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35fcdc55-dc29-451b-ad56-3a03b044dc81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23b625c7-db76-4040-9e08-7b25f2fb9433', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1257.130909] env[61663]: DEBUG oslo.service.loopingcall [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1257.131784] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1257.132055] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28fd2947-265b-4e4b-94bc-b266472da8b0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.162321] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1257.162321] env[61663]: value = "task-1690686" [ 1257.162321] env[61663]: _type = "Task" [ 1257.162321] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.170989] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690686, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.572702] env[61663]: DEBUG nova.network.neutron [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Updated VIF entry in instance network info cache for port ce1d802a-f0d8-404b-89b7-6a22a35fed90. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1257.573086] env[61663]: DEBUG nova.network.neutron [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Updating instance_info_cache with network_info: [{"id": "ce1d802a-f0d8-404b-89b7-6a22a35fed90", "address": "fa:16:3e:a1:85:50", "network": {"id": "c5fa3ef5-0d4a-47d7-99d9-90879158e4f4", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-176409592-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "59d9fb219eec43ba81650805a579f52b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f762954-6ca5-4da5-bf0a-5d31c51ec570", "external-id": "nsx-vlan-transportzone-930", "segmentation_id": 930, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce1d802a-f0", "ovs_interfaceid": "ce1d802a-f0d8-404b-89b7-6a22a35fed90", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.593264] env[61663]: DEBUG oslo_concurrency.lockutils [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] Releasing lock "refresh_cache-71509f58-5616-4d6a-9a88-3bfd9d414a0c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.593901] env[61663]: DEBUG nova.compute.manager [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Received event network-vif-plugged-417487b1-9e8c-4461-b3b6-a1088a797e16 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1257.594694] env[61663]: DEBUG oslo_concurrency.lockutils [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] Acquiring lock "f9a675b6-e76d-492b-ac34-3c7b10553fca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.594694] env[61663]: DEBUG oslo_concurrency.lockutils [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] Lock "f9a675b6-e76d-492b-ac34-3c7b10553fca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.594694] env[61663]: DEBUG oslo_concurrency.lockutils [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] Lock "f9a675b6-e76d-492b-ac34-3c7b10553fca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.594974] env[61663]: DEBUG 
nova.compute.manager [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] No waiting events found dispatching network-vif-plugged-417487b1-9e8c-4461-b3b6-a1088a797e16 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1257.595073] env[61663]: WARNING nova.compute.manager [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Received unexpected event network-vif-plugged-417487b1-9e8c-4461-b3b6-a1088a797e16 for instance with vm_state building and task_state spawning. [ 1257.595073] env[61663]: DEBUG nova.compute.manager [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Received event network-changed-417487b1-9e8c-4461-b3b6-a1088a797e16 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1257.595073] env[61663]: DEBUG nova.compute.manager [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Refreshing instance network info cache due to event network-changed-417487b1-9e8c-4461-b3b6-a1088a797e16. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1257.595636] env[61663]: DEBUG oslo_concurrency.lockutils [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] Acquiring lock "refresh_cache-f9a675b6-e76d-492b-ac34-3c7b10553fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.595798] env[61663]: DEBUG oslo_concurrency.lockutils [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] Acquired lock "refresh_cache-f9a675b6-e76d-492b-ac34-3c7b10553fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.596129] env[61663]: DEBUG nova.network.neutron [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Refreshing network info cache for port 417487b1-9e8c-4461-b3b6-a1088a797e16 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1257.675621] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690686, 'name': CreateVM_Task, 'duration_secs': 0.365075} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.675621] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1257.676682] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.676682] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.676916] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1257.678027] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbea6d0c-2bd0-4713-b8d1-94e221fd5cc2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.682791] env[61663]: DEBUG oslo_vmware.api [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for the task: (returnval){ [ 1257.682791] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525bf56d-8131-9183-9118-2744730ab6f0" [ 1257.682791] env[61663]: _type = "Task" [ 1257.682791] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.691058] env[61663]: DEBUG oslo_vmware.api [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525bf56d-8131-9183-9118-2744730ab6f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.743022] env[61663]: DEBUG nova.network.neutron [req-6e6286d4-356b-44aa-a5a4-56bce97b2e7f req-b0cc6b17-b967-4f12-9688-406305f325cc service nova] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Updated VIF entry in instance network info cache for port 7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1257.743291] env[61663]: DEBUG nova.network.neutron [req-6e6286d4-356b-44aa-a5a4-56bce97b2e7f req-b0cc6b17-b967-4f12-9688-406305f325cc service nova] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Updating instance_info_cache with network_info: [{"id": "7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c", "address": "fa:16:3e:b6:b7:c5", "network": {"id": "b617d364-2c5d-4403-8bfa-d8fc619cea12", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1540080399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fd7b809f894a18b89bcf60fa56eac1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c42264c-fb", "ovs_interfaceid": "7c42264c-fb03-4f0b-8e1f-d8e8f484fc9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.758442] env[61663]: DEBUG oslo_concurrency.lockutils [req-6e6286d4-356b-44aa-a5a4-56bce97b2e7f req-b0cc6b17-b967-4f12-9688-406305f325cc service nova] Releasing lock "refresh_cache-790791ee-4e6c-4116-8ade-ba61f55ebd4d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.197300] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.198389] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1258.198646] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.275221] env[61663]: DEBUG nova.network.neutron [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Updated VIF entry in instance network info cache for port 417487b1-9e8c-4461-b3b6-a1088a797e16. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1258.275608] env[61663]: DEBUG nova.network.neutron [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Updating instance_info_cache with network_info: [{"id": "417487b1-9e8c-4461-b3b6-a1088a797e16", "address": "fa:16:3e:bc:05:5d", "network": {"id": "d0bdae79-f7a6-43d5-941d-af47437f9fce", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1059103520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e841a35c2fe42e28403f4768875fd9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5756d009-13ad-4e13-a991-3b5e71830aa5", "external-id": "nsx-vlan-transportzone-608", "segmentation_id": 608, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap417487b1-9e", "ovs_interfaceid": "417487b1-9e8c-4461-b3b6-a1088a797e16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.288903] env[61663]: DEBUG oslo_concurrency.lockutils [req-31d059a2-60a1-405b-9b79-19a17f75ef64 req-388befa6-89b0-4a28-acc4-2eeb6ad9de2a service nova] Releasing lock "refresh_cache-f9a675b6-e76d-492b-ac34-3c7b10553fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.764784] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquiring lock "ee0e3e54-c135-489f-87ca-f441efebcbd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.765053] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock "ee0e3e54-c135-489f-87ca-f441efebcbd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.927618] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquiring lock "04488672-86c4-415b-961e-94641d570112" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.927618] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 
tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "04488672-86c4-415b-961e-94641d570112" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.130110] env[61663]: DEBUG oslo_concurrency.lockutils [None req-eb0262fd-69a1-49a7-a7da-79f570b4a6ec tempest-ImagesNegativeTestJSON-285249914 tempest-ImagesNegativeTestJSON-285249914-project-member] Acquiring lock "fffb383d-e1db-4640-9201-0ea897c472d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.130554] env[61663]: DEBUG oslo_concurrency.lockutils [None req-eb0262fd-69a1-49a7-a7da-79f570b4a6ec tempest-ImagesNegativeTestJSON-285249914 tempest-ImagesNegativeTestJSON-285249914-project-member] Lock "fffb383d-e1db-4640-9201-0ea897c472d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.082329] env[61663]: DEBUG nova.compute.manager [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Received event network-vif-plugged-23b625c7-db76-4040-9e08-7b25f2fb9433 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1261.082975] env[61663]: DEBUG oslo_concurrency.lockutils [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] Acquiring lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1261.082975] env[61663]: DEBUG oslo_concurrency.lockutils [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] Lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1261.082975] env[61663]: DEBUG oslo_concurrency.lockutils [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] Lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1261.088160] env[61663]: DEBUG nova.compute.manager [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] No waiting events found dispatching network-vif-plugged-23b625c7-db76-4040-9e08-7b25f2fb9433 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1261.088160] env[61663]: WARNING nova.compute.manager [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Received unexpected event network-vif-plugged-23b625c7-db76-4040-9e08-7b25f2fb9433 for instance with vm_state building and 
task_state spawning. [ 1261.088160] env[61663]: DEBUG nova.compute.manager [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Received event network-changed-23b625c7-db76-4040-9e08-7b25f2fb9433 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1261.088160] env[61663]: DEBUG nova.compute.manager [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Refreshing instance network info cache due to event network-changed-23b625c7-db76-4040-9e08-7b25f2fb9433. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1261.088160] env[61663]: DEBUG oslo_concurrency.lockutils [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] Acquiring lock "refresh_cache-6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1261.088465] env[61663]: DEBUG oslo_concurrency.lockutils [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] Acquired lock "refresh_cache-6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.088465] env[61663]: DEBUG nova.network.neutron [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Refreshing network info cache for port 23b625c7-db76-4040-9e08-7b25f2fb9433 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1262.086719] env[61663]: DEBUG nova.network.neutron [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Updated VIF entry in instance network info cache for port 23b625c7-db76-4040-9e08-7b25f2fb9433. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1262.086719] env[61663]: DEBUG nova.network.neutron [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Updating instance_info_cache with network_info: [{"id": "23b625c7-db76-4040-9e08-7b25f2fb9433", "address": "fa:16:3e:d3:b4:7d", "network": {"id": "b617d364-2c5d-4403-8bfa-d8fc619cea12", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1540080399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35fd7b809f894a18b89bcf60fa56eac1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35fcdc55-dc29-451b-ad56-3a03b044dc81", "external-id": "nsx-vlan-transportzone-552", "segmentation_id": 552, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23b625c7-db", "ovs_interfaceid": "23b625c7-db76-4040-9e08-7b25f2fb9433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.105111] env[61663]: DEBUG oslo_concurrency.lockutils [req-f91d909e-60d6-47cd-bd2c-194459789ab1 req-84f8ad38-7da3-4aba-98c9-f6b433f90cb8 service nova] Releasing lock "refresh_cache-6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.930118] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6076379f-4c07-4d68-96ce-69c60b736ecc tempest-ServersWithSpecificFlavorTestJSON-1727622844 tempest-ServersWithSpecificFlavorTestJSON-1727622844-project-member] Acquiring lock "4ef40272-deb2-414a-b0ac-0bc30ba2bc84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.930118] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6076379f-4c07-4d68-96ce-69c60b736ecc tempest-ServersWithSpecificFlavorTestJSON-1727622844 tempest-ServersWithSpecificFlavorTestJSON-1727622844-project-member] Lock "4ef40272-deb2-414a-b0ac-0bc30ba2bc84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.245303] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "879dce1e-340a-48d0-9291-857f39fec597" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.245658] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 
tempest-MultipleCreateTestJSON-691398207-project-member] Lock "879dce1e-340a-48d0-9291-857f39fec597" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1270.284857] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "f8dbebee-1ccc-4af8-a302-75e2f819161d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1270.284857] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "f8dbebee-1ccc-4af8-a302-75e2f819161d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1271.246657] env[61663]: DEBUG oslo_concurrency.lockutils [None req-baa02889-5d76-43b4-b99e-5a984d78f501 tempest-AttachInterfacesUnderV243Test-810853206 tempest-AttachInterfacesUnderV243Test-810853206-project-member] Acquiring lock "10cb6a2e-4a08-453e-9372-4ea14958470d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1271.246914] env[61663]: DEBUG oslo_concurrency.lockutils [None req-baa02889-5d76-43b4-b99e-5a984d78f501 tempest-AttachInterfacesUnderV243Test-810853206 tempest-AttachInterfacesUnderV243Test-810853206-project-member] Lock "10cb6a2e-4a08-453e-9372-4ea14958470d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1272.136043] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152a630f-8837-4755-add1-2707b6f242c8 tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] Acquiring lock "29c17291-50be-45a3-93c6-76bc8c7cbbb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1272.136043] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152a630f-8837-4755-add1-2707b6f242c8 tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] Lock "29c17291-50be-45a3-93c6-76bc8c7cbbb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.137103] env[61663]: WARNING oslo_vmware.rw_handles [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1276.137103] env[61663]: ERROR 
oslo_vmware.rw_handles Traceback (most recent call last): [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1276.137103] env[61663]: ERROR oslo_vmware.rw_handles [ 1276.137890] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/8fd37d02-df6f-4b52-af5d-e9af8deee719/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1276.138958] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1276.139275] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Copying Virtual Disk [datastore1] vmware_temp/8fd37d02-df6f-4b52-af5d-e9af8deee719/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/8fd37d02-df6f-4b52-af5d-e9af8deee719/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1276.139574] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af0e738b-45aa-4e05-b41b-88c5483691fa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.148275] env[61663]: DEBUG oslo_vmware.api [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Waiting for the task: (returnval){ [ 1276.148275] env[61663]: value = "task-1690687" [ 1276.148275] env[61663]: _type = "Task" [ 1276.148275] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.156557] env[61663]: DEBUG oslo_vmware.api [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Task: {'id': task-1690687, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.572030] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Acquiring lock "fde11c71-3511-4fea-84c7-0e7de062951b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.572030] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "fde11c71-3511-4fea-84c7-0e7de062951b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.660394] env[61663]: DEBUG oslo_vmware.exceptions [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1276.660709] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.664182] env[61663]: ERROR nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.664182] env[61663]: Faults: ['InvalidArgument'] [ 1276.664182] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Traceback (most recent call last): [ 1276.664182] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1276.664182] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] yield resources [ 1276.664182] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1276.664182] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] self.driver.spawn(context, instance, image_meta, [ 1276.664182] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1276.664182] 
env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1276.664182] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1276.664182] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] self._fetch_image_if_missing(context, vi) [ 1276.664182] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] image_cache(vi, tmp_image_ds_loc) [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] vm_util.copy_virtual_disk( [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] session._wait_for_task(vmdk_copy_task) [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] return self.wait_for_task(task_ref) [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] return evt.wait() [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] result = hub.switch() [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1276.664551] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] return self.greenlet.switch() [ 1276.665013] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1276.665013] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] self.f(*self.args, **self.kw) [ 1276.665013] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1276.665013] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] raise exceptions.translate_fault(task_info.error) [ 1276.665013] env[61663]: ERROR nova.compute.manager [instance: 
7f516750-b7ee-471b-a386-b898aac3985c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1276.665013] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Faults: ['InvalidArgument'] [ 1276.665013] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] [ 1276.665013] env[61663]: INFO nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Terminating instance [ 1276.666140] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.666351] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1276.666594] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a8a5da8-7726-46d1-ac40-16b329666537 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.669749] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquiring lock "refresh_cache-7f516750-b7ee-471b-a386-b898aac3985c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.669913] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquired lock "refresh_cache-7f516750-b7ee-471b-a386-b898aac3985c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.670095] env[61663]: DEBUG nova.network.neutron [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1276.677644] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1276.677802] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1276.679223] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-466005e2-af9f-4bf4-8cb5-5b8db6fd3396 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.690727] env[61663]: DEBUG oslo_vmware.api [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Waiting for the task: (returnval){ [ 1276.690727] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527d6904-67f2-00e9-6169-2b747999a2e8" [ 1276.690727] env[61663]: _type = "Task" [ 1276.690727] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.699606] env[61663]: DEBUG oslo_vmware.api [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527d6904-67f2-00e9-6169-2b747999a2e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.716902] env[61663]: DEBUG nova.network.neutron [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1276.888843] env[61663]: DEBUG nova.network.neutron [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.905205] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Releasing lock "refresh_cache-7f516750-b7ee-471b-a386-b898aac3985c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.905205] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1276.905205] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1276.906336] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da875c1-a6b5-41cc-99cf-f4a073b4eef4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.917581] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1276.917837] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-247ffab1-5303-4bb9-884a-9e39a7dfd23a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.946120] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1276.946120] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1276.946280] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Deleting the datastore file [datastore1] 7f516750-b7ee-471b-a386-b898aac3985c {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1276.948206] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7af82132-52f4-4d00-9c9c-3c43a0b15e61 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.954694] env[61663]: DEBUG oslo_vmware.api [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Waiting for the task: (returnval){ [ 1276.954694] env[61663]: value = "task-1690689" [ 1276.954694] env[61663]: _type = "Task" [ 1276.954694] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.964154] env[61663]: DEBUG oslo_vmware.api [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Task: {'id': task-1690689, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.201345] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1277.201673] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Creating directory with path [datastore1] vmware_temp/38a5b72d-930e-4529-9f26-f392e22e19fb/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1277.201876] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-def27d10-6458-4198-8c86-bf37e1ab9d06 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.214415] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Created directory with path [datastore1] vmware_temp/38a5b72d-930e-4529-9f26-f392e22e19fb/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1277.214524] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Fetch image to [datastore1] vmware_temp/38a5b72d-930e-4529-9f26-f392e22e19fb/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1277.214904] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/38a5b72d-930e-4529-9f26-f392e22e19fb/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1277.215515] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e106a3-a7f0-4a91-93c2-f1ebf72e44c3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.222734] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235d5252-5990-4c40-8ada-78d4526e5a11 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.233552] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80597e8-1993-4727-b7d0-952c5b5810d6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.271863] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-53fa2bb3-42bb-4946-a753-53dc1f56080e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.279092] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2f2483f7-a88c-45d9-b095-2d4c00bc6336 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.300601] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1277.381053] env[61663]: DEBUG oslo_vmware.rw_handles [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/38a5b72d-930e-4529-9f26-f392e22e19fb/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1277.462561] env[61663]: DEBUG oslo_vmware.rw_handles [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1277.462732] env[61663]: DEBUG oslo_vmware.rw_handles [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/38a5b72d-930e-4529-9f26-f392e22e19fb/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1277.469541] env[61663]: DEBUG oslo_vmware.api [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Task: {'id': task-1690689, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035798} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.469718] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1277.469931] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1277.470187] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1277.470667] env[61663]: INFO nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Took 0.57 seconds to destroy the instance on the hypervisor. [ 1277.470976] env[61663]: DEBUG oslo.service.loopingcall [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.471435] env[61663]: DEBUG nova.compute.manager [-] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}}
[ 1277.473873] env[61663]: DEBUG nova.compute.claims [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1277.474056] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1277.474279] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1277.871721] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1afa5fd-da9b-437d-b9a9-f86e37919f84 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1277.879884] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fb78ab-a3f6-48d4-b5d9-2bd8dec79b97 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1277.914598] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73bb216-87f8-4239-a2c3-e71e9a7f141f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1277.922047] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4648dd84-1f10-4d72-9690-3b1c4158ba60 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1277.935387] env[61663]: DEBUG nova.compute.provider_tree [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1277.944569] env[61663]: DEBUG nova.scheduler.client.report [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1277.960747] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.486s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1277.961346] env[61663]: ERROR nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1277.961346] env[61663]: Faults: ['InvalidArgument']
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Traceback (most recent call last):
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] self.driver.spawn(context, instance, image_meta,
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] self._fetch_image_if_missing(context, vi)
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] image_cache(vi, tmp_image_ds_loc)
[ 1277.961346] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] vm_util.copy_virtual_disk(
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] session._wait_for_task(vmdk_copy_task)
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] return self.wait_for_task(task_ref)
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] return evt.wait()
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] result = hub.switch()
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] return self.greenlet.switch()
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1277.961717] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] self.f(*self.args, **self.kw)
[ 1277.962095] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1277.962095] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] raise exceptions.translate_fault(task_info.error)
[ 1277.962095] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1277.962095] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Faults: ['InvalidArgument']
[ 1277.962095] env[61663]: ERROR nova.compute.manager [instance: 7f516750-b7ee-471b-a386-b898aac3985c]
[ 1277.962237] env[61663]: DEBUG nova.compute.utils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1277.965140] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Build of instance 7f516750-b7ee-471b-a386-b898aac3985c was re-scheduled: A specified parameter was not correct: fileType
[ 1277.965140] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1277.965635] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1277.965963] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquiring lock "refresh_cache-7f516750-b7ee-471b-a386-b898aac3985c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1277.966243] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Acquired lock "refresh_cache-7f516750-b7ee-471b-a386-b898aac3985c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1277.966477] env[61663]: DEBUG nova.network.neutron [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1278.003566] env[61663]: DEBUG nova.network.neutron [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1278.111203] env[61663]: DEBUG nova.network.neutron [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1278.122560] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Releasing lock "refresh_cache-7f516750-b7ee-471b-a386-b898aac3985c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1278.122793] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1278.123154] env[61663]: DEBUG nova.compute.manager [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] [instance: 7f516750-b7ee-471b-a386-b898aac3985c] Skipping network deallocation for instance since networking was not requested. {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}}
[ 1278.257250] env[61663]: INFO nova.scheduler.client.report [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Deleted allocations for instance 7f516750-b7ee-471b-a386-b898aac3985c
[ 1278.286650] env[61663]: DEBUG oslo_concurrency.lockutils [None req-34212bc6-9e74-43e6-9532-69b8813a6d00 tempest-ServersAdmin275Test-423465038 tempest-ServersAdmin275Test-423465038-project-member] Lock "7f516750-b7ee-471b-a386-b898aac3985c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.782s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1278.343230] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1278.402512] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1278.402778] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1278.404360] env[61663]: INFO nova.compute.claims [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1278.791180] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233fe9b5-207e-403d-8c4c-f4bd419d70d0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1278.800528] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e06c145-eb7e-4f15-a4b1-c20baad9081e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1278.840060] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a47a5cd-580f-48bb-afee-22f399fb98a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1278.848723] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b497ca4c-4d5a-48de-818b-307a563de66e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1278.867068] env[61663]: DEBUG nova.compute.provider_tree [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1278.877348] env[61663]: DEBUG nova.scheduler.client.report [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1278.898880] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.496s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1278.899543] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1278.941216] env[61663]: DEBUG nova.compute.utils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1278.943260] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1278.943356] env[61663]: DEBUG nova.network.neutron [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1278.959499] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1279.045805] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1279.075347] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1279.075491] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1279.075596] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1279.075743] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1279.075856] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1279.076023] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1279.077271] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1279.078886] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1279.078886] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1279.078886] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1279.078886] env[61663]: DEBUG nova.virt.hardware [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1279.079300] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088c8e64-8a49-4cb9-b74b-e96edab06350 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1279.091123] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-790d8875-13a3-452c-855c-d2db3bc8db48 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1279.156293] env[61663]: DEBUG nova.policy [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '81b8c0c6d19d4e8abc7cf8f377342232', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62314e77505f4833b32c5d0c1d34e610', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1280.385069] env[61663]: DEBUG nova.network.neutron [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Successfully created port: 07d961e4-fdd9-43f7-a53e-38d3ac000eab {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1282.271155] env[61663]: DEBUG nova.network.neutron [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Successfully updated port: 07d961e4-fdd9-43f7-a53e-38d3ac000eab {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1282.288731] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquiring lock "refresh_cache-6f7a3a1f-859d-42f5-b986-6a1a038ca536" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1282.290271] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquired lock "refresh_cache-6f7a3a1f-859d-42f5-b986-6a1a038ca536" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1282.290271] env[61663]: DEBUG nova.network.neutron [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1282.440741] env[61663]: DEBUG nova.network.neutron [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1283.320244] env[61663]: DEBUG nova.network.neutron [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Updating instance_info_cache with network_info: [{"id": "07d961e4-fdd9-43f7-a53e-38d3ac000eab", "address": "fa:16:3e:fe:1e:9f", "network": {"id": "88ae1a07-30f0-4956-a1e4-21747cb24dfb", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1586604386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62314e77505f4833b32c5d0c1d34e610", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d961e4-fd", "ovs_interfaceid": "07d961e4-fdd9-43f7-a53e-38d3ac000eab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1283.345465] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Releasing lock "refresh_cache-6f7a3a1f-859d-42f5-b986-6a1a038ca536" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1283.345465] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Instance network_info: |[{"id": "07d961e4-fdd9-43f7-a53e-38d3ac000eab", "address": "fa:16:3e:fe:1e:9f", "network": {"id": "88ae1a07-30f0-4956-a1e4-21747cb24dfb", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1586604386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62314e77505f4833b32c5d0c1d34e610", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d961e4-fd", "ovs_interfaceid": "07d961e4-fdd9-43f7-a53e-38d3ac000eab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1283.345883] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:1e:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '28d04eee-6dbb-491a-a999-b659c799679d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07d961e4-fdd9-43f7-a53e-38d3ac000eab', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1283.355141] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Creating folder: Project (62314e77505f4833b32c5d0c1d34e610). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1283.355887] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cebc7ab5-e729-4b42-8923-2f1b7d8fa556 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1283.368141] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Created folder: Project (62314e77505f4833b32c5d0c1d34e610) in parent group-v352575.
[ 1283.371267] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Creating folder: Instances. Parent ref: group-v352604. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1283.371267] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b2e363a-9819-4d2b-8f3e-9f5c6a6ad07e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1283.382056] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Created folder: Instances in parent group-v352604.
[ 1283.382056] env[61663]: DEBUG oslo.service.loopingcall [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1283.382056] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1283.382056] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-680429ff-50e8-4ca1-ba32-dbd10c82415a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1283.418671] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1283.418671] env[61663]: value = "task-1690692"
[ 1283.418671] env[61663]: _type = "Task"
[ 1283.418671] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1283.431338] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690692, 'name': CreateVM_Task} progress is 6%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1283.545185] env[61663]: DEBUG nova.compute.manager [req-23b513c0-dbfc-4c1b-b152-f53f057b7d55 req-2ee7fa9e-f95a-4cd9-96cd-fe13c1d6bf55 service nova] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Received event network-vif-plugged-07d961e4-fdd9-43f7-a53e-38d3ac000eab {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1283.545461] env[61663]: DEBUG oslo_concurrency.lockutils [req-23b513c0-dbfc-4c1b-b152-f53f057b7d55 req-2ee7fa9e-f95a-4cd9-96cd-fe13c1d6bf55 service nova] Acquiring lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1283.545601] env[61663]: DEBUG oslo_concurrency.lockutils [req-23b513c0-dbfc-4c1b-b152-f53f057b7d55 req-2ee7fa9e-f95a-4cd9-96cd-fe13c1d6bf55 service nova] Lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1283.545793] env[61663]: DEBUG oslo_concurrency.lockutils [req-23b513c0-dbfc-4c1b-b152-f53f057b7d55 req-2ee7fa9e-f95a-4cd9-96cd-fe13c1d6bf55 service nova] Lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1283.545921] env[61663]: DEBUG nova.compute.manager [req-23b513c0-dbfc-4c1b-b152-f53f057b7d55 req-2ee7fa9e-f95a-4cd9-96cd-fe13c1d6bf55 service nova] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] No waiting events found dispatching network-vif-plugged-07d961e4-fdd9-43f7-a53e-38d3ac000eab {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1283.546105] env[61663]: WARNING nova.compute.manager [req-23b513c0-dbfc-4c1b-b152-f53f057b7d55 req-2ee7fa9e-f95a-4cd9-96cd-fe13c1d6bf55 service nova] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Received unexpected event network-vif-plugged-07d961e4-fdd9-43f7-a53e-38d3ac000eab for instance with vm_state building and task_state spawning.
[ 1283.932678] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690692, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1284.438870] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690692, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1284.692677] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1284.939658] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690692, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1285.266857] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquiring lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1285.267143] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1285.436154] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690692, 'name': CreateVM_Task, 'duration_secs': 1.594826} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1285.436365] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1285.436997] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1285.437184] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1285.437718] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1285.438119] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5184aaa-a7df-4ee1-b8c0-c146ac8a82b2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1285.443261] env[61663]: DEBUG oslo_vmware.api [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Waiting for the task: (returnval){
[ 1285.443261] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d933e3-ff7e-9a1e-c75c-1bc54347e32f"
[ 1285.443261] env[61663]: _type = "Task"
[ 1285.443261] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1285.454671] env[61663]: DEBUG oslo_vmware.api [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d933e3-ff7e-9a1e-c75c-1bc54347e32f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1285.693752] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1285.693981] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 1285.694121] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 1285.729966] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.730829] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.731418] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.732857] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.732857] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.732857] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.732857] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.732857] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.733084] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.733084] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1285.733084] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 1285.958162] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1285.958162] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1285.958992] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1286.526832] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] Acquiring lock "71250f0d-8e81-444b-bed7-792f229a19a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1286.526832] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] Lock "71250f0d-8e81-444b-bed7-792f229a19a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1286.566803] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] Acquiring lock "6c3af584-cfad-4645-8246-d0366ddb8775" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1286.567100] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] Lock "6c3af584-cfad-4645-8246-d0366ddb8775" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1286.602948] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] Acquiring lock "55e8066d-cb8f-4731-b5bd-57adfebae81a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1286.603221] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] Lock "55e8066d-cb8f-4731-b5bd-57adfebae81a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1287.117255] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aaf9ffc7-6ead-4884-aabb-1c4d0fcba52f tempest-AttachInterfacesV270Test-1092600010 tempest-AttachInterfacesV270Test-1092600010-project-member] Acquiring lock "e56bdc9e-964f-4994-be20-9a981095f813" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1287.117255] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aaf9ffc7-6ead-4884-aabb-1c4d0fcba52f tempest-AttachInterfacesV270Test-1092600010 tempest-AttachInterfacesV270Test-1092600010-project-member] Lock "e56bdc9e-964f-4994-be20-9a981095f813" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1287.644419] env[61663]: DEBUG nova.compute.manager [req-16340d21-ca5b-46a7-a821-60ee0a12a0fb req-7c8f25a1-2eee-4aba-943d-fd28dac2aff2 service nova] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Received event network-changed-07d961e4-fdd9-43f7-a53e-38d3ac000eab {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1287.644766] env[61663]: DEBUG nova.compute.manager [req-16340d21-ca5b-46a7-a821-60ee0a12a0fb req-7c8f25a1-2eee-4aba-943d-fd28dac2aff2 service nova] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Refreshing instance network info cache due to event network-changed-07d961e4-fdd9-43f7-a53e-38d3ac000eab. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 1287.644843] env[61663]: DEBUG oslo_concurrency.lockutils [req-16340d21-ca5b-46a7-a821-60ee0a12a0fb req-7c8f25a1-2eee-4aba-943d-fd28dac2aff2 service nova] Acquiring lock "refresh_cache-6f7a3a1f-859d-42f5-b986-6a1a038ca536" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1287.644963] env[61663]: DEBUG oslo_concurrency.lockutils [req-16340d21-ca5b-46a7-a821-60ee0a12a0fb req-7c8f25a1-2eee-4aba-943d-fd28dac2aff2 service nova] Acquired lock "refresh_cache-6f7a3a1f-859d-42f5-b986-6a1a038ca536" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1287.645161] env[61663]: DEBUG nova.network.neutron [req-16340d21-ca5b-46a7-a821-60ee0a12a0fb req-7c8f25a1-2eee-4aba-943d-fd28dac2aff2 service nova] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Refreshing network info cache for port 07d961e4-fdd9-43f7-a53e-38d3ac000eab {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1287.726756] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1287.803726] env[61663]: DEBUG oslo_concurrency.lockutils [None req-883e33b0-fd45-4945-b405-76ea88b6cbc8 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] Acquiring lock "5d2229e3-08b2-432b-98b5-95cc9f6e649f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1287.803726] env[61663]: DEBUG oslo_concurrency.lockutils [None req-883e33b0-fd45-4945-b405-76ea88b6cbc8 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] Lock "5d2229e3-08b2-432b-98b5-95cc9f6e649f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1288.073729] env[61663]: DEBUG nova.network.neutron [req-16340d21-ca5b-46a7-a821-60ee0a12a0fb req-7c8f25a1-2eee-4aba-943d-fd28dac2aff2 service nova] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Updated VIF entry in instance network info cache for port 07d961e4-fdd9-43f7-a53e-38d3ac000eab. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1288.074098] env[61663]: DEBUG nova.network.neutron [req-16340d21-ca5b-46a7-a821-60ee0a12a0fb req-7c8f25a1-2eee-4aba-943d-fd28dac2aff2 service nova] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Updating instance_info_cache with network_info: [{"id": "07d961e4-fdd9-43f7-a53e-38d3ac000eab", "address": "fa:16:3e:fe:1e:9f", "network": {"id": "88ae1a07-30f0-4956-a1e4-21747cb24dfb", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1586604386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62314e77505f4833b32c5d0c1d34e610", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "28d04eee-6dbb-491a-a999-b659c799679d", "external-id": "nsx-vlan-transportzone-501", "segmentation_id": 501, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07d961e4-fd", "ovs_interfaceid": "07d961e4-fdd9-43f7-a53e-38d3ac000eab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1288.084099] env[61663]: DEBUG oslo_concurrency.lockutils [req-16340d21-ca5b-46a7-a821-60ee0a12a0fb req-7c8f25a1-2eee-4aba-943d-fd28dac2aff2 service nova] Releasing lock "refresh_cache-6f7a3a1f-859d-42f5-b986-6a1a038ca536" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1288.692413] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1288.692730] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1289.307059] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3cc7ddf2-952f-4ac2-ab15-d4b41d10ef52 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] Acquiring lock "0c42630f-9e39-4eeb-aa56-d953cff6b4a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1289.307059] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3cc7ddf2-952f-4ac2-ab15-d4b41d10ef52 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] Lock "0c42630f-9e39-4eeb-aa56-d953cff6b4a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1289.692142] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1289.692142] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 1289.692142] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1289.704614] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1289.704906] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1289.705799] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1289.705799] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1289.706300] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4025331e-6f34-4a30-be04-021d4944f3f7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.718195] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e9ddd8-7f91-4040-b6c2-4d88a022e2b7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.740143] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006ab173-3a88-41c8-a1cc-32594d172a79 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.747430] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec10a90d-f381-4320-8119-915279da8125 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1289.781565] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181294MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1289.781565] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1289.781565] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1289.880099] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.880292] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 29bee3d3-a6d2-43a9-8439-d5b842214cf1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.880455] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.881309] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1c36f4af-0222-48d3-ac90-776f7fe807de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.881309] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5e748c4b-03c5-4a88-a4ed-27093f2aef47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.881309] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 71509f58-5616-4d6a-9a88-3bfd9d414a0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.881309] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 790791ee-4e6c-4116-8ade-ba61f55ebd4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.881652] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f9a675b6-e76d-492b-ac34-3c7b10553fca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.881652] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.881652] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1289.917091] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.951695] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.972655] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.984620] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance fffb383d-e1db-4640-9201-0ea897c472d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1289.999216] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 4ef40272-deb2-414a-b0ac-0bc30ba2bc84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1290.016483] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 879dce1e-340a-48d0-9291-857f39fec597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1290.039730] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f8dbebee-1ccc-4af8-a302-75e2f819161d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1290.059252] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 10cb6a2e-4a08-453e-9372-4ea14958470d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1290.081029] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aa3ce05a-c267-4627-874a-f41a24704db2 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "562e2e30-99d6-4edd-8382-e2e765c6a449" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1290.081312] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aa3ce05a-c267-4627-874a-f41a24704db2 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "562e2e30-99d6-4edd-8382-e2e765c6a449" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1290.082388] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 29c17291-50be-45a3-93c6-76bc8c7cbbb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.098303] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance fde11c71-3511-4fea-84c7-0e7de062951b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.117662] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.132563] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 71250f0d-8e81-444b-bed7-792f229a19a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.146876] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6c3af584-cfad-4645-8246-d0366ddb8775 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.159830] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 55e8066d-cb8f-4731-b5bd-57adfebae81a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.174150] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e56bdc9e-964f-4994-be20-9a981095f813 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.187865] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5d2229e3-08b2-432b-98b5-95cc9f6e649f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.200497] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0c42630f-9e39-4eeb-aa56-d953cff6b4a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.214151] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 562e2e30-99d6-4edd-8382-e2e765c6a449 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1290.215090] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1290.215566] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1290.699951] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41592e0-dab4-4e9d-8396-41c343d631ea {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.712345] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d99c47-933b-4494-82f6-7bacd4da2840 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.750163] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a63499c-b565-466e-b05c-886753b356b6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.759159] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94378f63-cf63-4be5-9558-c3c989c88cd7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.776645] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.786810] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1290.808275] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1290.808275] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.027s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.808345] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.843146] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.843381] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.205651] env[61663]: DEBUG oslo_concurrency.lockutils [None req-16401c01-01df-4ef5-aac0-9dbc372bcfd3 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] Acquiring lock "ffe47fec-5f84-4a9a-a103-e59a90201064" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.205925] env[61663]: DEBUG oslo_concurrency.lockutils [None req-16401c01-01df-4ef5-aac0-9dbc372bcfd3 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] Lock "ffe47fec-5f84-4a9a-a103-e59a90201064" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.095832] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4980148b-bb2b-4f25-8e8c-0c40171d8aa7 tempest-ServerPasswordTestJSON-28149881 tempest-ServerPasswordTestJSON-28149881-project-member] Acquiring lock "25222911-beaf-4f80-be5e-a6decd09958d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.096214] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4980148b-bb2b-4f25-8e8c-0c40171d8aa7 tempest-ServerPasswordTestJSON-28149881 tempest-ServerPasswordTestJSON-28149881-project-member] Lock "25222911-beaf-4f80-be5e-a6decd09958d" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.319740] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c32361a0-91fc-428e-bd66-782734a43df3 tempest-ServerDiagnosticsV248Test-560508285 tempest-ServerDiagnosticsV248Test-560508285-project-member] Acquiring lock "4e94548a-c81b-46c1-886b-bb5e2b8ebf9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.320072] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c32361a0-91fc-428e-bd66-782734a43df3 tempest-ServerDiagnosticsV248Test-560508285 tempest-ServerDiagnosticsV248Test-560508285-project-member] Lock "4e94548a-c81b-46c1-886b-bb5e2b8ebf9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.514609] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f39f149a-5cc6-43d7-8bc6-629db9bdfd8f tempest-ServersTestManualDisk-1712154969 tempest-ServersTestManualDisk-1712154969-project-member] Acquiring lock "9f599f13-3906-4766-a1a5-a324da916370" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.514995] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f39f149a-5cc6-43d7-8bc6-629db9bdfd8f tempest-ServersTestManualDisk-1712154969 tempest-ServersTestManualDisk-1712154969-project-member] Lock "9f599f13-3906-4766-a1a5-a324da916370" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.521185] env[61663]: DEBUG oslo_concurrency.lockutils [None req-39dc81b8-87d1-4c46-bc00-4d97ad4903e7 tempest-ServersTestBootFromVolume-1644706521 tempest-ServersTestBootFromVolume-1644706521-project-member] Acquiring lock "5def97c4-7d72-4ade-bb17-160e91f67f75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.521794] env[61663]: DEBUG oslo_concurrency.lockutils [None req-39dc81b8-87d1-4c46-bc00-4d97ad4903e7 tempest-ServersTestBootFromVolume-1644706521 tempest-ServersTestBootFromVolume-1644706521-project-member] Lock "5def97c4-7d72-4ade-bb17-160e91f67f75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.153073] env[61663]: WARNING oslo_vmware.rw_handles [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1326.153073] env[61663]: ERROR 
oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1326.153073] env[61663]: ERROR oslo_vmware.rw_handles [ 1326.153073] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/38a5b72d-930e-4529-9f26-f392e22e19fb/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1326.155041] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1326.155433] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Copying Virtual Disk [datastore1] vmware_temp/38a5b72d-930e-4529-9f26-f392e22e19fb/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/38a5b72d-930e-4529-9f26-f392e22e19fb/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1326.155830] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8597aad0-e047-4798-a9a4-19105d8beb8d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.164952] env[61663]: DEBUG oslo_vmware.api [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Waiting for the task: (returnval){ [ 1326.164952] env[61663]: value = "task-1690704" [ 1326.164952] env[61663]: _type = "Task" [ 1326.164952] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.173057] env[61663]: DEBUG oslo_vmware.api [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Task: {'id': task-1690704, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.675447] env[61663]: DEBUG oslo_vmware.exceptions [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1326.675692] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.676297] env[61663]: ERROR nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.676297] env[61663]: Faults: ['InvalidArgument'] [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Traceback (most recent call last): [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] yield resources [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] self.driver.spawn(context, instance, image_meta, [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] self._fetch_image_if_missing(context, vi) [ 1326.676297] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] image_cache(vi, tmp_image_ds_loc) [ 1326.676626] env[61663]: ERROR 
nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] vm_util.copy_virtual_disk( [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] session._wait_for_task(vmdk_copy_task) [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] return self.wait_for_task(task_ref) [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] return evt.wait() [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] result = hub.switch() [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1326.676626] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] return self.greenlet.switch() [ 1326.677105] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1326.677105] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] self.f(*self.args, **self.kw) [ 1326.677105] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1326.677105] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] raise exceptions.translate_fault(task_info.error) [ 1326.677105] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1326.677105] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Faults: ['InvalidArgument'] [ 1326.677105] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] [ 1326.677105] env[61663]: INFO nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Terminating instance [ 1326.678268] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 
tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.678471] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.678699] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1729851a-244b-4371-a289-fbfc481be98a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.680978] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1326.681189] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1326.681882] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365defed-08ea-4d65-b6a5-0b6b3c0c1f79 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.688882] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1326.689224] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-492f1c73-0e2b-4377-84da-6dbd0ef15570 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.691375] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.691559] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1326.692496] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc66ce0f-d7f5-4a2c-8a76-630057e8b84d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.697180] env[61663]: DEBUG oslo_vmware.api [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Waiting for the task: (returnval){ [ 1326.697180] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b7234e-e494-adb9-a8ec-50bc53cd117a" [ 1326.697180] env[61663]: _type = "Task" [ 1326.697180] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.704629] env[61663]: DEBUG oslo_vmware.api [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b7234e-e494-adb9-a8ec-50bc53cd117a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.764164] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1326.764405] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1326.764590] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Deleting the datastore file [datastore1] 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1326.764888] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-545b585f-ae44-4b50-9895-0b61833a8abc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.771671] env[61663]: DEBUG oslo_vmware.api [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Waiting for the task: (returnval){ [ 1326.771671] env[61663]: value = "task-1690706" [ 1326.771671] env[61663]: _type = "Task" [ 1326.771671] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.780932] env[61663]: DEBUG oslo_vmware.api [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Task: {'id': task-1690706, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.207512] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1327.207781] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Creating directory with path [datastore1] vmware_temp/3f7c099d-8de5-4f6f-8d29-4770b79579a6/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1327.208079] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd33c326-616d-418c-8544-f926abbcd78d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.219822] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Created directory with path [datastore1] vmware_temp/3f7c099d-8de5-4f6f-8d29-4770b79579a6/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1327.220036] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Fetch image to [datastore1] vmware_temp/3f7c099d-8de5-4f6f-8d29-4770b79579a6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1327.220216] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/3f7c099d-8de5-4f6f-8d29-4770b79579a6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1327.220964] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310d5f88-764e-4b67-87ef-a41159b83b38 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.227763] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e1e481-081f-495a-aaaf-a1d21e8379ff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.236796] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c22ce18-4f5e-4a0d-abbd-87ecbbff70e3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.269284] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2398fd86-8af5-475b-840b-ac1cfce42b25 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.277149] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b5f642e2-5d29-4df9-ab38-689731a621b7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.281643] env[61663]: DEBUG oslo_vmware.api [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Task: {'id': task-1690706, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074263} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.281876] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1327.282074] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1327.282255] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1327.282430] env[61663]: INFO nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Took 0.60 seconds to destroy the instance on the hypervisor. 
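The DeleteDatastoreFile_Task sequence just above (invoke the call, log the opaque task id, poll "progress is 0%.", then report completion with a duration_secs) is the same oslo.vmware task-polling pattern that drove the CopyVirtualDisk_Task which failed earlier. A minimal sketch of that loop follows, for orientation only: the get_task_info callable, its state strings, and the RuntimeError are stand-ins invented here, not oslo.vmware's real API, which runs the poll inside a looping call against the vCenter TaskInfo object at a configured interval.

import time

def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
    # Poll the task until it reaches a terminal state. get_task_info is a
    # hypothetical helper returning a TaskInfo-like dict for task_ref.
    while True:
        info = get_task_info(task_ref)
        state = info['state']
        if state in ('queued', 'running'):
            # Corresponds to entries like
            # "Task: {'id': task-1690706, ...} progress is 0%."
            print("Task %s progress is %s%%." % (task_ref, info.get('progress', 0)))
        elif state == 'success':
            # Corresponds to "... completed successfully" with duration_secs.
            return info
        else:
            # Stand-in for the real error path: the fault name is matched
            # against known exception classes, and an unmatched fault (see
            # "Fault InvalidArgument not matched" earlier) falls through to
            # the generic VimFaultException that aborted the spawn.
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(poll_interval)
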
[ 1327.284492] env[61663]: DEBUG nova.compute.claims [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1327.284668] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.284882] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.314842] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1327.374950] env[61663]: DEBUG oslo_vmware.rw_handles [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3f7c099d-8de5-4f6f-8d29-4770b79579a6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1327.435605] env[61663]: DEBUG oslo_vmware.rw_handles [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1327.435791] env[61663]: DEBUG oslo_vmware.rw_handles [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3f7c099d-8de5-4f6f-8d29-4770b79579a6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1327.780816] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9205c6da-a894-4a14-a52b-c7813b52b3d9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.788988] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f3f995-d190-4c0d-8cec-44e7f333c76a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.820052] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b76147f-3ae1-4f83-a6ee-e59f7d79d9aa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.827669] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39527c47-00e8-41dc-aeb7-02a7efc5ebe2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.841256] env[61663]: DEBUG nova.compute.provider_tree [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.850855] env[61663]: DEBUG nova.scheduler.client.report [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1327.865684] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.581s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.866394] env[61663]: ERROR nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1327.866394] env[61663]: Faults: ['InvalidArgument'] [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Traceback (most recent call last): [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] self.driver.spawn(context, instance, image_meta, [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] self._fetch_image_if_missing(context, vi) [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] image_cache(vi, tmp_image_ds_loc) [ 1327.866394] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] vm_util.copy_virtual_disk( [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] session._wait_for_task(vmdk_copy_task) [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] return self.wait_for_task(task_ref) [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] return evt.wait() [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] result = hub.switch() [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] return self.greenlet.switch() [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1327.866801] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] self.f(*self.args, **self.kw) [ 1327.867079] env[61663]: ERROR nova.compute.manager [instance: 
2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1327.867079] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] raise exceptions.translate_fault(task_info.error) [ 1327.867079] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1327.867079] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Faults: ['InvalidArgument'] [ 1327.867079] env[61663]: ERROR nova.compute.manager [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] [ 1327.867199] env[61663]: DEBUG nova.compute.utils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1327.868563] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Build of instance 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd was re-scheduled: A specified parameter was not correct: fileType [ 1327.868563] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1327.868938] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1327.869134] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1327.869296] env[61663]: DEBUG nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1327.869460] env[61663]: DEBUG nova.network.neutron [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1328.429312] env[61663]: DEBUG nova.network.neutron [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.440708] env[61663]: INFO nova.compute.manager [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] [instance: 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd] Took 0.57 seconds to deallocate network for instance. [ 1328.552048] env[61663]: INFO nova.scheduler.client.report [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Deleted allocations for instance 2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd [ 1328.574676] env[61663]: DEBUG oslo_concurrency.lockutils [None req-564c06ad-7afe-4c4d-b243-8a456a98528c tempest-ServersAdminNegativeTestJSON-394452217 tempest-ServersAdminNegativeTestJSON-394452217-project-member] Lock "2d442033-3ecf-4d0c-90f9-a5bbb1d7a6fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.204s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.607199] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Starting instance... 
[ 1328.675085] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1328.675382] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1328.676978] env[61663]: INFO nova.compute.claims [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1329.152819] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfac0eb-289c-496b-bec4-4f36348f4593 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.160357] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedb547a-c65a-4f15-980c-5aaba4565c84 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.192046] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e686b681-ac64-426e-b159-4ce45511ad8c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.199274] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9dbc47f-6ace-44f9-99f6-50af4bd1c4b5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.213924] env[61663]: DEBUG nova.compute.provider_tree [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1329.222203] env[61663]: DEBUG nova.scheduler.client.report [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1329.239386] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.564s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1329.239922] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1329.274977] env[61663]: DEBUG nova.compute.utils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1329.276650] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1329.276763] env[61663]: DEBUG nova.network.neutron [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1329.285530] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1329.349742] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
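The Acquiring/acquired/"released" triples above are oslo.concurrency named locks; the waited/held timings come from the library's own inner() wrapper. A minimal sketch of the same primitive, using only oslo.concurrency (the function and its body are illustrative):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim(instance_uuid):
        # Runs with the named "compute_resources" lock held; lockutils
        # emits the same acquired/"released" log pairs with timings.
        print('claiming resources for %s' % instance_uuid)

    claim('98eb6b3f-69c8-4837-9b5c-a1485fe5cab6')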
[ 1329.366898] env[61663]: DEBUG nova.policy [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '033e5ebd18fb421b8ad3f4ad5033f1b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7bb1bdc9b1004ff591ab4e001d81b400', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1329.379753] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1329.380044] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1329.380215] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1329.380400] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1329.380548] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1329.380694] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1329.380932] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1329.381229] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1329.381416] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1329.381585] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1329.381984] env[61663]: DEBUG nova.virt.hardware [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1329.382627] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43775418-dcc2-4656-ac2a-0da5cd9d303d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1329.390905] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee729024-60f7-4f25-9d9c-14440e8158f2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1330.138356] env[61663]: DEBUG nova.network.neutron [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Successfully created port: 9e348538-3e96-4487-a7d8-21354ad72a52 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1331.235572] env[61663]: DEBUG nova.network.neutron [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Successfully updated port: 9e348538-3e96-4487-a7d8-21354ad72a52 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1331.250853] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "refresh_cache-98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1331.250924] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "refresh_cache-98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
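The topology search logged above amounts to enumerating factorizations of the vCPU count under the sockets/cores/threads caps ("limits were sockets=65536, cores=65536, threads=65536"). An illustrative enumerator under that reading, not Nova's implementation:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Collect (sockets, cores, threads) triples whose product equals
        # the vCPU count, honouring the per-dimension caps from the log.
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)], matching "Got 1 possible topologies"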
[ 1331.251104] env[61663]: DEBUG nova.network.neutron [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1331.338576] env[61663]: DEBUG nova.network.neutron [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1331.550991] env[61663]: DEBUG nova.compute.manager [req-20673dbf-4b18-4a42-9f8d-3201a6615ef9 req-f22f16a3-6819-47be-8f59-6f7f7337ec21 service nova] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Received event network-vif-plugged-9e348538-3e96-4487-a7d8-21354ad72a52 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1331.551209] env[61663]: DEBUG oslo_concurrency.lockutils [req-20673dbf-4b18-4a42-9f8d-3201a6615ef9 req-f22f16a3-6819-47be-8f59-6f7f7337ec21 service nova] Acquiring lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1331.551388] env[61663]: DEBUG oslo_concurrency.lockutils [req-20673dbf-4b18-4a42-9f8d-3201a6615ef9 req-f22f16a3-6819-47be-8f59-6f7f7337ec21 service nova] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1331.551555] env[61663]: DEBUG oslo_concurrency.lockutils [req-20673dbf-4b18-4a42-9f8d-3201a6615ef9 req-f22f16a3-6819-47be-8f59-6f7f7337ec21 service nova] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1331.551721] env[61663]: DEBUG nova.compute.manager [req-20673dbf-4b18-4a42-9f8d-3201a6615ef9 req-f22f16a3-6819-47be-8f59-6f7f7337ec21 service nova] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] No waiting events found dispatching network-vif-plugged-9e348538-3e96-4487-a7d8-21354ad72a52 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1331.551887] env[61663]: WARNING nova.compute.manager [req-20673dbf-4b18-4a42-9f8d-3201a6615ef9 req-f22f16a3-6819-47be-8f59-6f7f7337ec21 service nova] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Received unexpected event network-vif-plugged-9e348538-3e96-4487-a7d8-21354ad72a52 for instance with vm_state building and task_state spawning.
[ 1331.845981] env[61663]: DEBUG nova.network.neutron [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Updating instance_info_cache with network_info: [{"id": "9e348538-3e96-4487-a7d8-21354ad72a52", "address": "fa:16:3e:d2:81:18", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e348538-3e", "ovs_interfaceid": "9e348538-3e96-4487-a7d8-21354ad72a52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1331.860819] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "refresh_cache-98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1331.860991] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Instance network_info: |[{"id": "9e348538-3e96-4487-a7d8-21354ad72a52", "address": "fa:16:3e:d2:81:18", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e348538-3e", "ovs_interfaceid": "9e348538-3e96-4487-a7d8-21354ad72a52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1331.861785] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:81:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e348538-3e96-4487-a7d8-21354ad72a52', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
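The network_info payloads above are plain list-of-dict structures. A small sketch pulling the fixed IPs out of one, with the sample trimmed to just the fields the code touches:

    network_info = [{
        "id": "9e348538-3e96-4487-a7d8-21354ad72a52",
        "address": "fa:16:3e:d2:81:18",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.4", "type": "fixed"}],
        }]},
    }]

    # Walk port -> subnets -> ips and keep the fixed addresses.
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                if ip["type"] == "fixed":
                    print(vif["id"], ip["address"])  # -> 192.168.128.4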
[ 1331.870241] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating folder: Project (7bb1bdc9b1004ff591ab4e001d81b400). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1331.870294] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65e1319b-7313-4c22-8e21-ba553a48811e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.882544] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created folder: Project (7bb1bdc9b1004ff591ab4e001d81b400) in parent group-v352575.
[ 1331.882812] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating folder: Instances. Parent ref: group-v352611. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1331.883380] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85e1225b-8800-437f-abdd-7b5c008341fc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.893058] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created folder: Instances in parent group-v352611.
[ 1331.893300] env[61663]: DEBUG oslo.service.loopingcall [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1331.893507] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1331.893712] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f9f40c0-4e8e-4108-9fe6-156a4ef8f268 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1331.912166] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1331.912166] env[61663]: value = "task-1690709"
[ 1331.912166] env[61663]: _type = "Task"
[ 1331.912166] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1331.920328] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690709, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1332.423634] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690709, 'name': CreateVM_Task, 'duration_secs': 0.294973} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1332.423918] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1332.424392] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1332.424540] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1332.424877] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1332.425141] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa99dfee-f766-433c-a3fc-3a5ec250c4f8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1332.431022] env[61663]: DEBUG oslo_vmware.api [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){
[ 1332.431022] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520c15c7-8151-49a0-f35f-d000f8ac107b"
[ 1332.431022] env[61663]: _type = "Task"
[ 1332.431022] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1332.439310] env[61663]: DEBUG oslo_vmware.api [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520c15c7-8151-49a0-f35f-d000f8ac107b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1332.940757] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1332.940969] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1332.941232] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1333.735031] env[61663]: DEBUG nova.compute.manager [req-cc30fcc9-48c5-4672-af94-6093837e938d req-fe5d2f1d-b6f0-444b-bffb-741ce33dc1e2 service nova] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Received event network-changed-9e348538-3e96-4487-a7d8-21354ad72a52 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1333.735031] env[61663]: DEBUG nova.compute.manager [req-cc30fcc9-48c5-4672-af94-6093837e938d req-fe5d2f1d-b6f0-444b-bffb-741ce33dc1e2 service nova] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Refreshing instance network info cache due to event network-changed-9e348538-3e96-4487-a7d8-21354ad72a52. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 1333.735031] env[61663]: DEBUG oslo_concurrency.lockutils [req-cc30fcc9-48c5-4672-af94-6093837e938d req-fe5d2f1d-b6f0-444b-bffb-741ce33dc1e2 service nova] Acquiring lock "refresh_cache-98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1333.735031] env[61663]: DEBUG oslo_concurrency.lockutils [req-cc30fcc9-48c5-4672-af94-6093837e938d req-fe5d2f1d-b6f0-444b-bffb-741ce33dc1e2 service nova] Acquired lock "refresh_cache-98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1333.735031] env[61663]: DEBUG nova.network.neutron [req-cc30fcc9-48c5-4672-af94-6093837e938d req-fe5d2f1d-b6f0-444b-bffb-741ce33dc1e2 service nova] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Refreshing network info cache for port 9e348538-3e96-4487-a7d8-21354ad72a52 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1334.619272] env[61663]: DEBUG nova.network.neutron [req-cc30fcc9-48c5-4672-af94-6093837e938d req-fe5d2f1d-b6f0-444b-bffb-741ce33dc1e2 service nova] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Updated VIF entry in instance network info cache for port 9e348538-3e96-4487-a7d8-21354ad72a52. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1334.619697] env[61663]: DEBUG nova.network.neutron [req-cc30fcc9-48c5-4672-af94-6093837e938d req-fe5d2f1d-b6f0-444b-bffb-741ce33dc1e2 service nova] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Updating instance_info_cache with network_info: [{"id": "9e348538-3e96-4487-a7d8-21354ad72a52", "address": "fa:16:3e:d2:81:18", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e348538-3e", "ovs_interfaceid": "9e348538-3e96-4487-a7d8-21354ad72a52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1334.635587] env[61663]: DEBUG oslo_concurrency.lockutils [req-cc30fcc9-48c5-4672-af94-6093837e938d req-fe5d2f1d-b6f0-444b-bffb-741ce33dc1e2 service nova] Releasing lock "refresh_cache-98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1340.473744] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "1305216b-0ee5-499a-a82a-30b45a8c832c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1340.474039] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1346.692612] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1346.692814] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 1346.692919] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 1346.715346] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.715514] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.717280] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.717280] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.717280] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.717280] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.717280] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.717519] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.717519] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.717519] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1346.717519] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 1346.717519] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1348.692098] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1348.692412] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1349.695589] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1349.708546] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1349.708889] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1349.709061] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1349.709226] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1349.710659] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb03c46-37fd-471e-93cd-b8bb1aaf3e76 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1349.719387] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c718cc5a-32f0-448c-87ae-ada0bdd63c6c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1349.733458] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499d0830-2537-4949-8909-0fd0b3c2435b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1349.740027] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d7c627-8ec4-4059-89fc-d58c0de4e779 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1349.771029] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181310MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1349.771186] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1349.771386] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1349.847218] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 29bee3d3-a6d2-43a9-8439-d5b842214cf1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.847375] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.847585] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1c36f4af-0222-48d3-ac90-776f7fe807de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.847617] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5e748c4b-03c5-4a88-a4ed-27093f2aef47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.847742] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 71509f58-5616-4d6a-9a88-3bfd9d414a0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.847867] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 790791ee-4e6c-4116-8ade-ba61f55ebd4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.847986] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f9a675b6-e76d-492b-ac34-3c7b10553fca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.848118] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.848240] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.848370] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1349.860782] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.871843] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.881586] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance fffb383d-e1db-4640-9201-0ea897c472d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.897700] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 4ef40272-deb2-414a-b0ac-0bc30ba2bc84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.908115] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 879dce1e-340a-48d0-9291-857f39fec597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.917716] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f8dbebee-1ccc-4af8-a302-75e2f819161d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.927430] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 10cb6a2e-4a08-453e-9372-4ea14958470d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.937038] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 29c17291-50be-45a3-93c6-76bc8c7cbbb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.946979] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance fde11c71-3511-4fea-84c7-0e7de062951b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.957013] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.969044] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 71250f0d-8e81-444b-bed7-792f229a19a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.979273] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6c3af584-cfad-4645-8246-d0366ddb8775 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.988706] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 55e8066d-cb8f-4731-b5bd-57adfebae81a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1349.998572] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e56bdc9e-964f-4994-be20-9a981095f813 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1350.008716] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5d2229e3-08b2-432b-98b5-95cc9f6e649f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1350.019435] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0c42630f-9e39-4eeb-aa56-d953cff6b4a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1350.029869] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 562e2e30-99d6-4edd-8382-e2e765c6a449 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1350.038933] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ffe47fec-5f84-4a9a-a103-e59a90201064 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1350.048993] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 25222911-beaf-4f80-be5e-a6decd09958d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1350.060113] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 4e94548a-c81b-46c1-886b-bb5e2b8ebf9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1350.072376] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 9f599f13-3906-4766-a1a5-a324da916370 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1350.083815] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5def97c4-7d72-4ade-bb17-160e91f67f75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1350.095998] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1350.096430] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1350.096672] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1350.489856] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca7d0c6-d1ea-4a0d-983a-0fe9a21d714a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.497677] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84aec5f9-070f-4639-bb7f-bb968f153f85 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.528189] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75b0333-32b9-4bc1-9d39-19887c3cdeb5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.535173] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701c9010-446d-4bc5-940b-31eb78750fb5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.547942] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.556291] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1350.570083] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1350.570083] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.799s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.566605] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.692536] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.692536] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1351.692536] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1354.692096] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1374.099930] env[61663]: WARNING oslo_vmware.rw_handles [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1374.099930] env[61663]: ERROR oslo_vmware.rw_handles [ 1374.099930] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/3f7c099d-8de5-4f6f-8d29-4770b79579a6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1374.101184] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a 
tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1374.101426] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Copying Virtual Disk [datastore1] vmware_temp/3f7c099d-8de5-4f6f-8d29-4770b79579a6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/3f7c099d-8de5-4f6f-8d29-4770b79579a6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1374.101712] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b797d583-2685-4c36-a1b5-d6e9128186c6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.111310] env[61663]: DEBUG oslo_vmware.api [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Waiting for the task: (returnval){ [ 1374.111310] env[61663]: value = "task-1690710" [ 1374.111310] env[61663]: _type = "Task" [ 1374.111310] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.118840] env[61663]: DEBUG oslo_vmware.api [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Task: {'id': task-1690710, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.621747] env[61663]: DEBUG oslo_vmware.exceptions [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1374.622086] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.622643] env[61663]: ERROR nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1374.622643] env[61663]: Faults: ['InvalidArgument'] [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Traceback (most recent call last): [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] yield resources [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] self.driver.spawn(context, instance, image_meta, [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] self._fetch_image_if_missing(context, vi) [ 1374.622643] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] image_cache(vi, tmp_image_ds_loc) [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] vm_util.copy_virtual_disk( [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] session._wait_for_task(vmdk_copy_task) [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] return self.wait_for_task(task_ref) [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] return evt.wait() [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] result = hub.switch() [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1374.622960] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] return self.greenlet.switch() [ 1374.623310] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1374.623310] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] self.f(*self.args, **self.kw) [ 1374.623310] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1374.623310] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] raise exceptions.translate_fault(task_info.error) [ 1374.623310] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1374.623310] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Faults: ['InvalidArgument'] [ 1374.623310] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] [ 1374.623310] env[61663]: INFO nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Terminating instance [ 1374.624605] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.624811] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1374.625098] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d630f10-bfe0-4e79-a6cd-95648d55ab6e 
{{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.627931] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1374.628141] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1374.628869] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0d33a0-4ae1-44b1-bf1e-50c8efd7f69a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.635888] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1374.636122] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d32e116f-a93f-4680-b077-6b6503744440 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.638244] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1374.638422] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1374.639361] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e98a38de-89f6-4f26-89c1-120f2906074a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.644058] env[61663]: DEBUG oslo_vmware.api [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Waiting for the task: (returnval){ [ 1374.644058] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520b8ea4-b1a9-4c41-d9f5-bba093f4e905" [ 1374.644058] env[61663]: _type = "Task" [ 1374.644058] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.652819] env[61663]: DEBUG oslo_vmware.api [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520b8ea4-b1a9-4c41-d9f5-bba093f4e905, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.712222] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1374.712537] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1374.712775] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Deleting the datastore file [datastore1] 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1374.713012] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62669042-6f57-4d77-89bc-06d8245ad143 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.720793] env[61663]: DEBUG oslo_vmware.api [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Waiting for the task: (returnval){ [ 1374.720793] env[61663]: value = "task-1690712" [ 1374.720793] env[61663]: _type = "Task" [ 1374.720793] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.728042] env[61663]: DEBUG oslo_vmware.api [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Task: {'id': task-1690712, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.155168] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1375.155503] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Creating directory with path [datastore1] vmware_temp/0ccfddf4-b089-48fe-a8a3-aa0073ee02e1/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1375.155703] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32cea40e-d667-4780-920b-d850ea340d40 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.167611] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Created directory with path [datastore1] vmware_temp/0ccfddf4-b089-48fe-a8a3-aa0073ee02e1/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1375.167825] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Fetch image to [datastore1] vmware_temp/0ccfddf4-b089-48fe-a8a3-aa0073ee02e1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1375.167998] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/0ccfddf4-b089-48fe-a8a3-aa0073ee02e1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1375.168800] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f1ea60-3fc7-4942-9cda-dcb2ea619b7b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.175688] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9840cc6-28d1-4398-889f-104900a8823f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.184650] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6d3179-f2ba-4c00-b54a-9fecbb318a64 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.214408] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96527875-10f4-4400-9825-5815e05348e3 {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.220378] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-23a3b47a-64fb-45fe-97be-b373e6328564 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.229614] env[61663]: DEBUG oslo_vmware.api [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Task: {'id': task-1690712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065023} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.229829] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1375.230016] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1375.230193] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1375.230371] env[61663]: INFO nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1375.232410] env[61663]: DEBUG nova.compute.claims [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1375.232612] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.232798] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.244679] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1375.300111] env[61663]: DEBUG oslo_vmware.rw_handles [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ccfddf4-b089-48fe-a8a3-aa0073ee02e1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1375.364086] env[61663]: DEBUG oslo_vmware.rw_handles [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1375.364298] env[61663]: DEBUG oslo_vmware.rw_handles [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ccfddf4-b089-48fe-a8a3-aa0073ee02e1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1375.690293] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8033cca2-873a-445e-b76b-4c4c5f58984f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.697902] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028c4555-e855-425a-8ea6-de8db0cb504a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.726724] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bc66aa-66cd-4eb6-8b2c-747760676e9c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.735268] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a3f427-a4dd-4eb1-96e2-5d232e16bbd8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.747766] env[61663]: DEBUG nova.compute.provider_tree [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1375.755468] env[61663]: DEBUG nova.scheduler.client.report [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1375.770094] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.537s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.770617] env[61663]: ERROR nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1375.770617] env[61663]: Faults: ['InvalidArgument'] [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Traceback (most recent call last): [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] self.driver.spawn(context, instance, image_meta, [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] self._fetch_image_if_missing(context, vi) [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] image_cache(vi, tmp_image_ds_loc) [ 1375.770617] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] vm_util.copy_virtual_disk( [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] session._wait_for_task(vmdk_copy_task) [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] return self.wait_for_task(task_ref) [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] return evt.wait() [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] result = hub.switch() [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] return self.greenlet.switch() [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1375.770946] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] self.f(*self.args, **self.kw) [ 1375.772149] env[61663]: ERROR nova.compute.manager [instance: 
667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1375.772149] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] raise exceptions.translate_fault(task_info.error) [ 1375.772149] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1375.772149] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Faults: ['InvalidArgument'] [ 1375.772149] env[61663]: ERROR nova.compute.manager [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] [ 1375.772149] env[61663]: DEBUG nova.compute.utils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1375.772702] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Build of instance 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246 was re-scheduled: A specified parameter was not correct: fileType [ 1375.772702] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1375.773096] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1375.773274] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1375.773428] env[61663]: DEBUG nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1375.773589] env[61663]: DEBUG nova.network.neutron [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1376.265948] env[61663]: DEBUG nova.network.neutron [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.281676] env[61663]: INFO nova.compute.manager [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] [instance: 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246] Took 0.51 seconds to deallocate network for instance. [ 1376.390929] env[61663]: INFO nova.scheduler.client.report [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Deleted allocations for instance 667bfd5b-1331-4ff4-93ee-eaa7c7cc4246 [ 1376.416525] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f7e5db93-ee2c-4199-b3f2-4f217d43111a tempest-ServerDiagnosticsNegativeTest-735271879 tempest-ServerDiagnosticsNegativeTest-735271879-project-member] Lock "667bfd5b-1331-4ff4-93ee-eaa7c7cc4246" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 146.071s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.436099] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1376.495907] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.497280] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.499630] env[61663]: INFO nova.compute.claims [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1376.924304] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f613479-0df8-48f9-9bee-2dca111f40c3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.931817] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e04d30ec-5aae-4a79-ab8c-9d838ca4fa9a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.961963] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2759d21e-7df8-4e3e-a7dd-3640266ec2c2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.969713] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0051f57-223b-45b8-84c3-9ec09298e9b9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.983335] env[61663]: DEBUG nova.compute.provider_tree [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.994726] env[61663]: DEBUG nova.scheduler.client.report [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
1377.011496] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.515s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.012053] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1377.053806] env[61663]: DEBUG nova.compute.utils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1377.055296] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1377.055296] env[61663]: DEBUG nova.network.neutron [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1377.070624] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1377.140659] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1377.170764] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1377.171033] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1377.171196] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1377.171382] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1377.171531] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1377.171680] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1377.171892] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1377.172146] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1377.172352] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1377.172549] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1377.172741] env[61663]: DEBUG nova.virt.hardware [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1377.173608] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686c237a-a529-4109-89b0-67930cbd3102 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.182045] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2b4041-8a6d-42e4-a15d-5d1eba6890dc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.196916] env[61663]: DEBUG nova.policy [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6735265f5f2c481aa99bf7027ba71b04', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f58339a6c9264b97b4f92c5c20c73ad3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1378.021542] env[61663]: DEBUG nova.network.neutron [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Successfully created port: 30250543-611d-428b-89ec-85449c09e46f {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1379.196138] env[61663]: DEBUG nova.network.neutron [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Successfully updated port: 30250543-611d-428b-89ec-85449c09e46f {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1379.209750] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquiring lock "refresh_cache-ee0e3e54-c135-489f-87ca-f441efebcbd5" {{(pid=61663) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.209913] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquired lock "refresh_cache-ee0e3e54-c135-489f-87ca-f441efebcbd5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.210080] env[61663]: DEBUG nova.network.neutron [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1379.289032] env[61663]: DEBUG nova.network.neutron [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1379.412138] env[61663]: DEBUG nova.compute.manager [req-39507f7e-6725-41f5-8515-cf22cfd2ef25 req-7eef892c-eb2e-4c6a-95e3-1767fbb67ad6 service nova] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Received event network-vif-plugged-30250543-611d-428b-89ec-85449c09e46f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1379.412138] env[61663]: DEBUG oslo_concurrency.lockutils [req-39507f7e-6725-41f5-8515-cf22cfd2ef25 req-7eef892c-eb2e-4c6a-95e3-1767fbb67ad6 service nova] Acquiring lock "ee0e3e54-c135-489f-87ca-f441efebcbd5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.412138] env[61663]: DEBUG oslo_concurrency.lockutils [req-39507f7e-6725-41f5-8515-cf22cfd2ef25 req-7eef892c-eb2e-4c6a-95e3-1767fbb67ad6 service nova] Lock "ee0e3e54-c135-489f-87ca-f441efebcbd5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.412138] env[61663]: DEBUG oslo_concurrency.lockutils [req-39507f7e-6725-41f5-8515-cf22cfd2ef25 req-7eef892c-eb2e-4c6a-95e3-1767fbb67ad6 service nova] Lock "ee0e3e54-c135-489f-87ca-f441efebcbd5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.412520] env[61663]: DEBUG nova.compute.manager [req-39507f7e-6725-41f5-8515-cf22cfd2ef25 req-7eef892c-eb2e-4c6a-95e3-1767fbb67ad6 service nova] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] No waiting events found dispatching network-vif-plugged-30250543-611d-428b-89ec-85449c09e46f {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1379.412520] env[61663]: WARNING nova.compute.manager [req-39507f7e-6725-41f5-8515-cf22cfd2ef25 req-7eef892c-eb2e-4c6a-95e3-1767fbb67ad6 service nova] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Received unexpected event network-vif-plugged-30250543-611d-428b-89ec-85449c09e46f for instance with vm_state building and task_state spawning. 
[ 1379.610403] env[61663]: DEBUG nova.network.neutron [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Updating instance_info_cache with network_info: [{"id": "30250543-611d-428b-89ec-85449c09e46f", "address": "fa:16:3e:61:99:f6", "network": {"id": "3c794bfb-a492-472f-ae53-e485028aa5ce", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1761716823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f58339a6c9264b97b4f92c5c20c73ad3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30250543-61", "ovs_interfaceid": "30250543-611d-428b-89ec-85449c09e46f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.630018] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Releasing lock "refresh_cache-ee0e3e54-c135-489f-87ca-f441efebcbd5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.630018] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Instance network_info: |[{"id": "30250543-611d-428b-89ec-85449c09e46f", "address": "fa:16:3e:61:99:f6", "network": {"id": "3c794bfb-a492-472f-ae53-e485028aa5ce", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1761716823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f58339a6c9264b97b4f92c5c20c73ad3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30250543-61", "ovs_interfaceid": "30250543-611d-428b-89ec-85449c09e46f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 
1379.630315] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:99:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd7d0d95-6848-4e69-ac21-75f8db82a3b5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30250543-611d-428b-89ec-85449c09e46f', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1379.637884] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Creating folder: Project (f58339a6c9264b97b4f92c5c20c73ad3). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1379.638725] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4ff65f7-4964-4940-b3c5-9b6e81c4d516 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.651115] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Created folder: Project (f58339a6c9264b97b4f92c5c20c73ad3) in parent group-v352575. [ 1379.651115] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Creating folder: Instances. Parent ref: group-v352614. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1379.651115] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d750a1ae-dd42-41d3-9c94-ac565d87ef32 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.658023] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Created folder: Instances in parent group-v352614. [ 1379.659049] env[61663]: DEBUG oslo.service.loopingcall [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1379.659446] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1379.660059] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca2c787f-26d7-446b-b21b-5e81fef629ba {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.680331] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1379.680331] env[61663]: value = "task-1690715" [ 1379.680331] env[61663]: _type = "Task" [ 1379.680331] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.689290] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690715, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.191322] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690715, 'name': CreateVM_Task, 'duration_secs': 0.329301} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.191677] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1380.193209] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.193209] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.193209] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1380.193406] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e94f5313-b09e-450c-a46b-04a3f22d0f01 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.200533] env[61663]: DEBUG oslo_vmware.api [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Waiting for the task: (returnval){ [ 1380.200533] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5271b1ac-7d4e-eaae-1e55-c93783e4bd11" [ 1380.200533] env[61663]: _type = "Task" [ 1380.200533] 
env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.212220] env[61663]: DEBUG oslo_vmware.api [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5271b1ac-7d4e-eaae-1e55-c93783e4bd11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.713475] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.713475] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1380.713600] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.656784] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquiring lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.657160] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.701867] env[61663]: DEBUG nova.compute.manager [req-b2ae39bd-18e3-4962-8bfb-60ccaf2cc050 req-2ab14e65-a7b2-46c7-99d3-7806af527c1e service nova] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Received event network-changed-30250543-611d-428b-89ec-85449c09e46f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1381.702097] env[61663]: DEBUG nova.compute.manager [req-b2ae39bd-18e3-4962-8bfb-60ccaf2cc050 req-2ab14e65-a7b2-46c7-99d3-7806af527c1e service nova] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Refreshing instance network info cache due to event network-changed-30250543-611d-428b-89ec-85449c09e46f.
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1381.702367] env[61663]: DEBUG oslo_concurrency.lockutils [req-b2ae39bd-18e3-4962-8bfb-60ccaf2cc050 req-2ab14e65-a7b2-46c7-99d3-7806af527c1e service nova] Acquiring lock "refresh_cache-ee0e3e54-c135-489f-87ca-f441efebcbd5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.702579] env[61663]: DEBUG oslo_concurrency.lockutils [req-b2ae39bd-18e3-4962-8bfb-60ccaf2cc050 req-2ab14e65-a7b2-46c7-99d3-7806af527c1e service nova] Acquired lock "refresh_cache-ee0e3e54-c135-489f-87ca-f441efebcbd5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.702681] env[61663]: DEBUG nova.network.neutron [req-b2ae39bd-18e3-4962-8bfb-60ccaf2cc050 req-2ab14e65-a7b2-46c7-99d3-7806af527c1e service nova] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Refreshing network info cache for port 30250543-611d-428b-89ec-85449c09e46f {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1382.171052] env[61663]: DEBUG nova.network.neutron [req-b2ae39bd-18e3-4962-8bfb-60ccaf2cc050 req-2ab14e65-a7b2-46c7-99d3-7806af527c1e service nova] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Updated VIF entry in instance network info cache for port 30250543-611d-428b-89ec-85449c09e46f. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1382.172102] env[61663]: DEBUG nova.network.neutron [req-b2ae39bd-18e3-4962-8bfb-60ccaf2cc050 req-2ab14e65-a7b2-46c7-99d3-7806af527c1e service nova] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Updating instance_info_cache with network_info: [{"id": "30250543-611d-428b-89ec-85449c09e46f", "address": "fa:16:3e:61:99:f6", "network": {"id": "3c794bfb-a492-472f-ae53-e485028aa5ce", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1761716823-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f58339a6c9264b97b4f92c5c20c73ad3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd7d0d95-6848-4e69-ac21-75f8db82a3b5", "external-id": "nsx-vlan-transportzone-272", "segmentation_id": 272, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30250543-61", "ovs_interfaceid": "30250543-611d-428b-89ec-85449c09e46f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.183117] env[61663]: DEBUG oslo_concurrency.lockutils [req-b2ae39bd-18e3-4962-8bfb-60ccaf2cc050 req-2ab14e65-a7b2-46c7-99d3-7806af527c1e service nova] Releasing lock "refresh_cache-ee0e3e54-c135-489f-87ca-f441efebcbd5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.692859] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.692466] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1408.692657] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1408.692777] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1408.713749] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.714123] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.714123] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.714202] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.714332] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.714439] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.714559] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.714678] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.714797] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.715037] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1408.715182] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1408.715659] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.692079] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1409.704451] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.704628] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.704792] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.704970] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1409.706096] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9a3956-0597-4322-9b1f-b61f98d6f623 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.714793] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16baaec-92c8-44ec-89ed-2103d78309c5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.728019] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c142d55-a269-4141-8cff-2d40c742c77c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.733943] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2798d6d2-e566-49be-a2b7-92f9a1042e7d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.763758] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181320MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1409.763910] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.764121] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.838155] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 29bee3d3-a6d2-43a9-8439-d5b842214cf1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.838272] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1c36f4af-0222-48d3-ac90-776f7fe807de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.838418] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5e748c4b-03c5-4a88-a4ed-27093f2aef47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.838523] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 71509f58-5616-4d6a-9a88-3bfd9d414a0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.838671] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 790791ee-4e6c-4116-8ade-ba61f55ebd4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.838814] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f9a675b6-e76d-492b-ac34-3c7b10553fca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.838955] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.839108] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.839247] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.839395] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1409.851345] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.862054] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance fffb383d-e1db-4640-9201-0ea897c472d9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.872182] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 4ef40272-deb2-414a-b0ac-0bc30ba2bc84 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.882146] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 879dce1e-340a-48d0-9291-857f39fec597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.893364] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f8dbebee-1ccc-4af8-a302-75e2f819161d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.904439] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 10cb6a2e-4a08-453e-9372-4ea14958470d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.915083] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 29c17291-50be-45a3-93c6-76bc8c7cbbb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.923389] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance fde11c71-3511-4fea-84c7-0e7de062951b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.933564] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.943887] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 71250f0d-8e81-444b-bed7-792f229a19a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.953536] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6c3af584-cfad-4645-8246-d0366ddb8775 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.965168] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 55e8066d-cb8f-4731-b5bd-57adfebae81a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.975753] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e56bdc9e-964f-4994-be20-9a981095f813 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.986044] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5d2229e3-08b2-432b-98b5-95cc9f6e649f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.996816] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0c42630f-9e39-4eeb-aa56-d953cff6b4a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.010488] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 562e2e30-99d6-4edd-8382-e2e765c6a449 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.020506] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ffe47fec-5f84-4a9a-a103-e59a90201064 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.030727] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 25222911-beaf-4f80-be5e-a6decd09958d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.040233] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 4e94548a-c81b-46c1-886b-bb5e2b8ebf9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.065872] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 9f599f13-3906-4766-a1a5-a324da916370 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.076711] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5def97c4-7d72-4ade-bb17-160e91f67f75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.086419] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.110680] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.111656] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1410.111656] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1410.490197] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82165aeb-9d94-417d-81ed-076f11e2b7e6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.498954] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1bf1a8-9edf-4eef-8837-4e120d120c61 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.528324] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4591c6-2482-4ff3-972c-58b2aca173b5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.535595] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1227b04e-4cf8-494c-a79d-6d63c230c887 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.548580] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1410.557665] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1410.576322] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1410.576322] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.812s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.572508] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1411.691805] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1412.692343] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.692427] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.692427] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1414.692858] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.687773] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.119827] env[61663]: WARNING oslo_vmware.rw_handles [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1424.119827] env[61663]: ERROR oslo_vmware.rw_handles [ 1424.120405] env[61663]: DEBUG nova.virt.vmwareapi.images [None 
req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/0ccfddf4-b089-48fe-a8a3-aa0073ee02e1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1424.121996] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1424.122231] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Copying Virtual Disk [datastore1] vmware_temp/0ccfddf4-b089-48fe-a8a3-aa0073ee02e1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/0ccfddf4-b089-48fe-a8a3-aa0073ee02e1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1424.122514] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-096bce2e-3c72-462b-9fb5-617a41888459 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.131696] env[61663]: DEBUG oslo_vmware.api [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Waiting for the task: (returnval){ [ 1424.131696] env[61663]: value = "task-1690716" [ 1424.131696] env[61663]: _type = "Task" [ 1424.131696] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.140062] env[61663]: DEBUG oslo_vmware.api [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Task: {'id': task-1690716, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.642585] env[61663]: DEBUG oslo_vmware.exceptions [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1424.642877] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.643450] env[61663]: ERROR nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1424.643450] env[61663]: Faults: ['InvalidArgument'] [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Traceback (most recent call last): [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] yield resources [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] self.driver.spawn(context, instance, image_meta, [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] self._fetch_image_if_missing(context, vi) [ 1424.643450] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] image_cache(vi, tmp_image_ds_loc) [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] vm_util.copy_virtual_disk( [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] session._wait_for_task(vmdk_copy_task) [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] return self.wait_for_task(task_ref) [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] return evt.wait() [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] result = hub.switch() [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1424.643839] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] return self.greenlet.switch() [ 1424.644180] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1424.644180] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] self.f(*self.args, **self.kw) [ 1424.644180] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1424.644180] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] raise exceptions.translate_fault(task_info.error) [ 1424.644180] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1424.644180] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Faults: ['InvalidArgument'] [ 1424.644180] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] [ 1424.644180] env[61663]: INFO nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Terminating instance [ 1424.645404] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.646029] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1424.646634] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] 
[instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1424.646898] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1424.647203] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db8eee4e-bd02-4931-906b-be4386e11e23 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.649506] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7560dd0d-91f5-4e9d-810f-8dccd0d1dcda {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.656590] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1424.656888] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3d88a57d-9b96-47f5-8969-1bc204c1baa8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.659047] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1424.659292] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1424.660299] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afb9e538-5df1-4645-aabe-57e785ef73b1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.665287] env[61663]: DEBUG oslo_vmware.api [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Waiting for the task: (returnval){ [ 1424.665287] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e827aa-5b75-ca3f-60e9-c0532507b39a" [ 1424.665287] env[61663]: _type = "Task" [ 1424.665287] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.673359] env[61663]: DEBUG oslo_vmware.api [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e827aa-5b75-ca3f-60e9-c0532507b39a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.736903] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1424.737193] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1424.737413] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Deleting the datastore file [datastore1] 29bee3d3-a6d2-43a9-8439-d5b842214cf1 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.737707] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3855dc2c-9a09-474f-901c-dbb26fc0eb40 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.744637] env[61663]: DEBUG oslo_vmware.api [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Waiting for the task: (returnval){ [ 1424.744637] env[61663]: value = "task-1690718" [ 1424.744637] env[61663]: _type = "Task" [ 1424.744637] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.752558] env[61663]: DEBUG oslo_vmware.api [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Task: {'id': task-1690718, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.175939] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1425.176287] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Creating directory with path [datastore1] vmware_temp/ef9886c7-521c-4b67-b62a-0e988eb066c4/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1425.176483] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4311abc-2ae2-41ac-89b7-28605a5cdd33 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.187944] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Created directory with path [datastore1] vmware_temp/ef9886c7-521c-4b67-b62a-0e988eb066c4/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1425.188195] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Fetch image to [datastore1] vmware_temp/ef9886c7-521c-4b67-b62a-0e988eb066c4/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1425.188439] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/ef9886c7-521c-4b67-b62a-0e988eb066c4/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1425.189140] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142729e7-ddf8-42d9-84dc-0a0ad18e6a14 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.196888] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e9380b-2303-40b6-a0fe-0febe1e912dc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.206140] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ac4821-8b12-42df-aacc-1393ff8bc4d3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.236816] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-13f3e6d4-db5d-4fa1-974d-8ec4aaa4f5e2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.242490] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c0635367-1350-4328-a500-dd4b95dfb1cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.252936] env[61663]: DEBUG oslo_vmware.api [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Task: {'id': task-1690718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076825} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.253258] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1425.253468] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1425.253670] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1425.253878] env[61663]: INFO nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1425.256029] env[61663]: DEBUG nova.compute.claims [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1425.256221] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.256495] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.271676] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1425.329768] env[61663]: DEBUG oslo_vmware.rw_handles [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ef9886c7-521c-4b67-b62a-0e988eb066c4/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1425.396619] env[61663]: DEBUG oslo_vmware.rw_handles [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1425.396619] env[61663]: DEBUG oslo_vmware.rw_handles [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ef9886c7-521c-4b67-b62a-0e988eb066c4/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1425.727916] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfa81e8-4398-46f7-ae48-f1ca61ab4da4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.735464] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f5e859-ee67-4d4e-9ed3-fc7924291f71 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.764369] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ce6a4e-3c0a-4f79-8990-ad7dbb1ebc04 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.771668] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10de5b08-e188-4b77-90bd-2a2163b32776 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.785334] env[61663]: DEBUG nova.compute.provider_tree [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1425.793853] env[61663]: DEBUG nova.scheduler.client.report [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1425.807155] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.551s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.807722] env[61663]: ERROR nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1425.807722] env[61663]: Faults: ['InvalidArgument'] [ 1425.807722] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Traceback (most recent call last): [ 1425.807722] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1425.807722] env[61663]: ERROR 
nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] self.driver.spawn(context, instance, image_meta, [ 1425.807722] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1425.807722] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1425.807722] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1425.807722] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] self._fetch_image_if_missing(context, vi) [ 1425.807722] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1425.807722] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] image_cache(vi, tmp_image_ds_loc) [ 1425.807722] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] vm_util.copy_virtual_disk( [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] session._wait_for_task(vmdk_copy_task) [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] return self.wait_for_task(task_ref) [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] return evt.wait() [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] result = hub.switch() [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] return self.greenlet.switch() [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1425.808122] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] self.f(*self.args, **self.kw) [ 1425.808489] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1425.808489] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] raise exceptions.translate_fault(task_info.error) [ 1425.808489] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1425.808489] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Faults: ['InvalidArgument'] [ 1425.808489] env[61663]: ERROR nova.compute.manager [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] [ 1425.808489] env[61663]: DEBUG nova.compute.utils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1425.809809] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Build of instance 29bee3d3-a6d2-43a9-8439-d5b842214cf1 was re-scheduled: A specified parameter was not correct: fileType [ 1425.809809] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1425.810190] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1425.810364] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1425.810518] env[61663]: DEBUG nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1425.810678] env[61663]: DEBUG nova.network.neutron [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1426.393860] env[61663]: DEBUG nova.network.neutron [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.409016] env[61663]: INFO nova.compute.manager [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] [instance: 29bee3d3-a6d2-43a9-8439-d5b842214cf1] Took 0.60 seconds to deallocate network for instance. [ 1426.518779] env[61663]: INFO nova.scheduler.client.report [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Deleted allocations for instance 29bee3d3-a6d2-43a9-8439-d5b842214cf1 [ 1426.548389] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3fec3732-9d3b-4caf-8807-63d359a103e1 tempest-ServerDiagnosticsTest-447360169 tempest-ServerDiagnosticsTest-447360169-project-member] Lock "29bee3d3-a6d2-43a9-8439-d5b842214cf1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.054s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.565343] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1426.624396] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.624652] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.626199] env[61663]: INFO nova.compute.claims [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1427.056651] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8818f870-b3d6-4655-8f4c-ba203c25e024 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.064118] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed80cf6-a89d-49e9-bfbe-a1b262de28c1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.093757] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2def7e-cf85-4010-8adb-0c217e0c6cdc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.100930] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da15a01b-a78f-4268-a0aa-6ed07334c589 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.113756] env[61663]: DEBUG nova.compute.provider_tree [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1427.122433] env[61663]: DEBUG nova.scheduler.client.report [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1427.135477] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.511s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.135940] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1427.175057] env[61663]: DEBUG nova.compute.utils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1427.177101] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1427.177292] env[61663]: DEBUG nova.network.neutron [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1427.186528] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1427.252067] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1427.255542] env[61663]: DEBUG nova.policy [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e2dde1cc9fd4390a38b6976e3aa2357', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13c05cdcd6e14adea60f0075b182b2b0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1427.278760] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1427.279017] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1427.279185] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1427.279370] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1427.279518] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1427.279666] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1427.279875] env[61663]: DEBUG 
nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1427.280046] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1427.280222] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1427.280387] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1427.280563] env[61663]: DEBUG nova.virt.hardware [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1427.281420] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c19a93e-5047-47c7-a464-bf16e70a49ad {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.289540] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae49669-6501-4409-826f-ae449143ca92 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.828173] env[61663]: DEBUG nova.network.neutron [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Successfully created port: 12949ec4-6708-4560-9e6b-e69bef449602 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1429.218717] env[61663]: DEBUG nova.network.neutron [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Successfully updated port: 12949ec4-6708-4560-9e6b-e69bef449602 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1429.236689] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquiring lock "refresh_cache-04488672-86c4-415b-961e-94641d570112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1429.236827] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b 
tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquired lock "refresh_cache-04488672-86c4-415b-961e-94641d570112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1429.236978] env[61663]: DEBUG nova.network.neutron [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1429.318714] env[61663]: DEBUG nova.network.neutron [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1429.517340] env[61663]: DEBUG nova.compute.manager [req-f3c54a5e-2214-4d09-9ac7-911f1667b901 req-6b151d39-d52a-470b-b1d2-027acea32b46 service nova] [instance: 04488672-86c4-415b-961e-94641d570112] Received event network-vif-plugged-12949ec4-6708-4560-9e6b-e69bef449602 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1429.517644] env[61663]: DEBUG oslo_concurrency.lockutils [req-f3c54a5e-2214-4d09-9ac7-911f1667b901 req-6b151d39-d52a-470b-b1d2-027acea32b46 service nova] Acquiring lock "04488672-86c4-415b-961e-94641d570112-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.517918] env[61663]: DEBUG oslo_concurrency.lockutils [req-f3c54a5e-2214-4d09-9ac7-911f1667b901 req-6b151d39-d52a-470b-b1d2-027acea32b46 service nova] Lock "04488672-86c4-415b-961e-94641d570112-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1429.518162] env[61663]: DEBUG oslo_concurrency.lockutils [req-f3c54a5e-2214-4d09-9ac7-911f1667b901 req-6b151d39-d52a-470b-b1d2-027acea32b46 service nova] Lock "04488672-86c4-415b-961e-94641d570112-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1429.518395] env[61663]: DEBUG nova.compute.manager [req-f3c54a5e-2214-4d09-9ac7-911f1667b901 req-6b151d39-d52a-470b-b1d2-027acea32b46 service nova] [instance: 04488672-86c4-415b-961e-94641d570112] No waiting events found dispatching network-vif-plugged-12949ec4-6708-4560-9e6b-e69bef449602 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1429.518646] env[61663]: WARNING nova.compute.manager [req-f3c54a5e-2214-4d09-9ac7-911f1667b901 req-6b151d39-d52a-470b-b1d2-027acea32b46 service nova] [instance: 04488672-86c4-415b-961e-94641d570112] Received unexpected event network-vif-plugged-12949ec4-6708-4560-9e6b-e69bef449602 for instance with vm_state building and task_state spawning. 
[ 1429.620620] env[61663]: DEBUG nova.network.neutron [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Updating instance_info_cache with network_info: [{"id": "12949ec4-6708-4560-9e6b-e69bef449602", "address": "fa:16:3e:b3:c7:92", "network": {"id": "ed56da72-b8e7-498c-80d3-a5b50338843c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-711162665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13c05cdcd6e14adea60f0075b182b2b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12949ec4-67", "ovs_interfaceid": "12949ec4-6708-4560-9e6b-e69bef449602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.631998] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Releasing lock "refresh_cache-04488672-86c4-415b-961e-94641d570112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.632330] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Instance network_info: |[{"id": "12949ec4-6708-4560-9e6b-e69bef449602", "address": "fa:16:3e:b3:c7:92", "network": {"id": "ed56da72-b8e7-498c-80d3-a5b50338843c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-711162665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13c05cdcd6e14adea60f0075b182b2b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12949ec4-67", "ovs_interfaceid": "12949ec4-6708-4560-9e6b-e69bef449602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1429.632761] env[61663]: DEBUG 
nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:c7:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e547d234-640c-449b-8279-0b16f75d6627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12949ec4-6708-4560-9e6b-e69bef449602', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1429.640542] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Creating folder: Project (13c05cdcd6e14adea60f0075b182b2b0). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1429.641090] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae3842cd-f3c3-4f87-a3bf-566780a2f66a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.653880] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Created folder: Project (13c05cdcd6e14adea60f0075b182b2b0) in parent group-v352575. [ 1429.654113] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Creating folder: Instances. Parent ref: group-v352617. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1429.654723] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bb66f6a2-9304-40cc-bf2c-cf905d3261af {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.664171] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Created folder: Instances in parent group-v352617. [ 1429.664439] env[61663]: DEBUG oslo.service.loopingcall [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1429.664655] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04488672-86c4-415b-961e-94641d570112] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1429.664874] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa6b7989-6f98-4940-861a-7f95621552d6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.683305] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1429.683305] env[61663]: value = "task-1690721" [ 1429.683305] env[61663]: _type = "Task" [ 1429.683305] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.690496] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690721, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.109013] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquiring lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.109272] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.193967] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690721, 'name': CreateVM_Task, 'duration_secs': 0.303301} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.194163] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04488672-86c4-415b-961e-94641d570112] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1430.194928] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.195515] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.195616] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1430.197364] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60f2356d-dd05-472b-a61f-31164417b5d2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.204097] env[61663]: DEBUG oslo_vmware.api [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Waiting for the task: (returnval){ [ 1430.204097] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527d342a-f5b5-48e4-7510-4a9b794182ee" [ 1430.204097] env[61663]: _type = "Task" [ 1430.204097] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.210263] env[61663]: DEBUG oslo_vmware.api [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527d342a-f5b5-48e4-7510-4a9b794182ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.449578] env[61663]: DEBUG oslo_concurrency.lockutils [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquiring lock "1c36f4af-0222-48d3-ac90-776f7fe807de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.712123] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1430.712386] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1430.712717] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.726038] env[61663]: DEBUG nova.compute.manager [req-89920099-387b-4d4e-9aaa-3efc102c1b88 req-c8f01980-e823-4996-975c-2ea9a4cef4ee service nova] [instance: 04488672-86c4-415b-961e-94641d570112] Received event network-changed-12949ec4-6708-4560-9e6b-e69bef449602 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1431.726303] env[61663]: DEBUG nova.compute.manager [req-89920099-387b-4d4e-9aaa-3efc102c1b88 req-c8f01980-e823-4996-975c-2ea9a4cef4ee service nova] [instance: 04488672-86c4-415b-961e-94641d570112] Refreshing instance network info cache due to event network-changed-12949ec4-6708-4560-9e6b-e69bef449602. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1431.726455] env[61663]: DEBUG oslo_concurrency.lockutils [req-89920099-387b-4d4e-9aaa-3efc102c1b88 req-c8f01980-e823-4996-975c-2ea9a4cef4ee service nova] Acquiring lock "refresh_cache-04488672-86c4-415b-961e-94641d570112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.726617] env[61663]: DEBUG oslo_concurrency.lockutils [req-89920099-387b-4d4e-9aaa-3efc102c1b88 req-c8f01980-e823-4996-975c-2ea9a4cef4ee service nova] Acquired lock "refresh_cache-04488672-86c4-415b-961e-94641d570112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.726783] env[61663]: DEBUG nova.network.neutron [req-89920099-387b-4d4e-9aaa-3efc102c1b88 req-c8f01980-e823-4996-975c-2ea9a4cef4ee service nova] [instance: 04488672-86c4-415b-961e-94641d570112] Refreshing network info cache for port 12949ec4-6708-4560-9e6b-e69bef449602 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1432.207577] env[61663]: DEBUG nova.network.neutron [req-89920099-387b-4d4e-9aaa-3efc102c1b88 req-c8f01980-e823-4996-975c-2ea9a4cef4ee service nova] [instance: 04488672-86c4-415b-961e-94641d570112] Updated VIF entry in instance network info cache for port 12949ec4-6708-4560-9e6b-e69bef449602. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1432.207946] env[61663]: DEBUG nova.network.neutron [req-89920099-387b-4d4e-9aaa-3efc102c1b88 req-c8f01980-e823-4996-975c-2ea9a4cef4ee service nova] [instance: 04488672-86c4-415b-961e-94641d570112] Updating instance_info_cache with network_info: [{"id": "12949ec4-6708-4560-9e6b-e69bef449602", "address": "fa:16:3e:b3:c7:92", "network": {"id": "ed56da72-b8e7-498c-80d3-a5b50338843c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-711162665-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13c05cdcd6e14adea60f0075b182b2b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e547d234-640c-449b-8279-0b16f75d6627", "external-id": "nsx-vlan-transportzone-539", "segmentation_id": 539, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12949ec4-67", "ovs_interfaceid": "12949ec4-6708-4560-9e6b-e69bef449602", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.217266] env[61663]: DEBUG oslo_concurrency.lockutils [req-89920099-387b-4d4e-9aaa-3efc102c1b88 req-c8f01980-e823-4996-975c-2ea9a4cef4ee service nova] Releasing lock "refresh_cache-04488672-86c4-415b-961e-94641d570112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1432.647546] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 
tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquiring lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.614718] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquiring lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.414541] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquiring lock "f9a675b6-e76d-492b-ac34-3c7b10553fca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.547133] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.317544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquiring lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.366639] env[61663]: DEBUG oslo_concurrency.lockutils [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.364766] env[61663]: DEBUG oslo_concurrency.lockutils [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquiring lock "ee0e3e54-c135-489f-87ca-f441efebcbd5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.908639] env[61663]: DEBUG oslo_concurrency.lockutils [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquiring lock "04488672-86c4-415b-961e-94641d570112" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.842332] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-0d29d93f-4437-4922-93b2-096a8d63acf9 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] Acquiring lock "87c4b17f-9890-44fe-9974-0f6c45e316d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.842594] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0d29d93f-4437-4922-93b2-096a8d63acf9 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] Lock "87c4b17f-9890-44fe-9974-0f6c45e316d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.692621] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.692934] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1468.692999] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1468.718781] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.718994] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.719091] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.719203] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.719377] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.719465] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.719573] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.719693] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.719812] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.719929] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 04488672-86c4-415b-961e-94641d570112] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1468.720067] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1468.720556] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.720753] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1468.720882] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1468.736614] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] There are 0 instances to clean {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1469.707900] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1469.721274] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.721524] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.721699] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.721861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1469.723397] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7915b9a9-fbad-49b4-9f01-ce4ec32fb74c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.732921] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9817a957-02e5-4de8-b34e-b09a02a377c8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.747665] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398e0a4d-db11-4099-9936-bd4e224c4ebd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.754753] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6e8246-c9bb-48e3-aa6e-c84b1b5e1056 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.785112] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181294MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1469.785277] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.785474] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.961301] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1c36f4af-0222-48d3-ac90-776f7fe807de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
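
[annotation] Each of the ten instances audited here holds the same flavor-sized allocation, {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}. The "Final resource view" totals reported further down follow directly from that, assuming the 512 MB reserved memory from the provider inventory is counted into used_ram:

# Arithmetic check of the final resource view below
# (used_ram=1792MB, used_disk=10GB, used_vcpus=10).
instances = 10
claim = {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}  # per instance, from the log
reserved_mb = 512                                     # from the provider inventory

used_ram = reserved_mb + instances * claim["MEMORY_MB"]  # 512 + 1280 = 1792 MB
used_disk = instances * claim["DISK_GB"]                 # 10 GB
used_vcpus = instances * claim["VCPU"]                   # 10

assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)
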
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.961519] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5e748c4b-03c5-4a88-a4ed-27093f2aef47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.961729] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 71509f58-5616-4d6a-9a88-3bfd9d414a0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.961878] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 790791ee-4e6c-4116-8ade-ba61f55ebd4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.962016] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f9a675b6-e76d-492b-ac34-3c7b10553fca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.962139] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.962263] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.962380] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.962498] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.962616] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1469.976357] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 879dce1e-340a-48d0-9291-857f39fec597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1469.988795] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f8dbebee-1ccc-4af8-a302-75e2f819161d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1469.999996] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 10cb6a2e-4a08-453e-9372-4ea14958470d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.017571] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance fde11c71-3511-4fea-84c7-0e7de062951b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.028011] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.041979] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 71250f0d-8e81-444b-bed7-792f229a19a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.049886] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6c3af584-cfad-4645-8246-d0366ddb8775 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.061340] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 55e8066d-cb8f-4731-b5bd-57adfebae81a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.072464] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e56bdc9e-964f-4994-be20-9a981095f813 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.084539] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5d2229e3-08b2-432b-98b5-95cc9f6e649f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.098549] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0c42630f-9e39-4eeb-aa56-d953cff6b4a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.111030] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 562e2e30-99d6-4edd-8382-e2e765c6a449 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.119927] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ffe47fec-5f84-4a9a-a103-e59a90201064 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.137153] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 25222911-beaf-4f80-be5e-a6decd09958d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.146145] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 4e94548a-c81b-46c1-886b-bb5e2b8ebf9d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.157646] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 9f599f13-3906-4766-a1a5-a324da916370 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.169808] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5def97c4-7d72-4ade-bb17-160e91f67f75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.180084] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.191056] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.201723] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.213046] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 87c4b17f-9890-44fe-9974-0f6c45e316d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.213295] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1470.213442] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1470.231247] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing inventories for resource provider b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1470.245945] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating ProviderTree inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1470.246143] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1470.256820] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing aggregate associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, aggregates: None {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1470.274649] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing trait associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, traits: 
COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1470.731692] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8156e7e2-a924-410c-acf6-3e67d7988653 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.739934] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47482e46-fdaa-4197-9e2c-15db9713bc78 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.773274] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29c6388-dddf-4c68-a448-75cbf76d8a66 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.780940] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21cdbad-ffbd-44cd-9ef5-859b43101151 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.795239] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.805902] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1470.827342] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1470.827554] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.042s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.404542] env[61663]: WARNING oslo_vmware.rw_handles [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1471.404542] env[61663]: ERROR 
oslo_vmware.rw_handles self._conn.getresponse() [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1471.404542] env[61663]: ERROR oslo_vmware.rw_handles [ 1471.405081] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/ef9886c7-521c-4b67-b62a-0e988eb066c4/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1471.411222] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1471.411495] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Copying Virtual Disk [datastore1] vmware_temp/ef9886c7-521c-4b67-b62a-0e988eb066c4/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/ef9886c7-521c-4b67-b62a-0e988eb066c4/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1471.411860] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4234f517-faf1-4d03-9fb4-64b2780fa67b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.420432] env[61663]: DEBUG oslo_vmware.api [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Waiting for the task: (returnval){ [ 1471.420432] env[61663]: value = "task-1690722" [ 1471.420432] env[61663]: _type = "Task" [ 1471.420432] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.428478] env[61663]: DEBUG oslo_vmware.api [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Task: {'id': task-1690722, 'name': CopyVirtualDisk_Task} progress is 0%. 
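
[annotation] The ProviderTree inventory refreshed above is what the scheduler can place against. Placement treats (total - reserved) * allocation_ratio as schedulable capacity, with max_unit capping any single allocation; a quick sketch using the figures this node reports for provider b47d006d-a9bd-461e-a5d9-39811f005278:

# Effective capacity from the inventory shown above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 183},
}
for rc, inv in inventory.items():
    cap = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: schedulable={cap:.0f}, max per allocation={inv['max_unit']}")
# VCPU comes out to 192 despite 48 physical vCPUs because of the 4.0 ratio.
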
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.693251] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.694234] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.694234] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1471.694234] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances with incomplete migration {{(pid=61663) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1471.933221] env[61663]: DEBUG oslo_vmware.exceptions [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1471.933679] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.934691] env[61663]: ERROR nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1471.934691] env[61663]: Faults: ['InvalidArgument'] [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Traceback (most recent call last): [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] yield resources [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] self.driver.spawn(context, instance, image_meta, [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 
1c36f4af-0222-48d3-ac90-776f7fe807de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] self._fetch_image_if_missing(context, vi) [ 1471.934691] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] image_cache(vi, tmp_image_ds_loc) [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] vm_util.copy_virtual_disk( [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] session._wait_for_task(vmdk_copy_task) [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] return self.wait_for_task(task_ref) [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] return evt.wait() [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] result = hub.switch() [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1471.935103] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] return self.greenlet.switch() [ 1471.935429] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1471.935429] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] self.f(*self.args, **self.kw) [ 1471.935429] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1471.935429] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] raise exceptions.translate_fault(task_info.error) [ 1471.935429] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1471.935429] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Faults: ['InvalidArgument'] [ 1471.935429] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] [ 1471.935429] env[61663]: INFO nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Terminating instance [ 1471.937554] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.937765] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1471.938035] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00ed67e1-1779-42ba-8229-ba15ec562aff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.941565] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Start destroying the instance on the hypervisor. 
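
[annotation] The traceback above ends with wait_for_task translating the vCenter task error into oslo_vmware.exceptions.VimFaultException, which spawn lets propagate so the compute manager can tear the instance down. A sketch of that failure path; session and copy_task are stand-ins for Nova's session wrapper and the CopyVirtualDisk task reference:

from oslo_vmware import exceptions as vexc

# Sketch of the failure path in the traceback above: the copy task comes
# back in error state and wait_for_task raises a translated fault.
def cache_sparse_image(session, copy_task):
    try:
        session.wait_for_task(copy_task)
    except vexc.VimFaultException as e:
        # e.fault_list is ['InvalidArgument'] in the failure above; spawn
        # re-raises and the manager destroys the half-built instance.
        print("copy failed with faults:", e.fault_list)
        raise
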
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1471.941758] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1471.942835] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfeae63-58e6-4b53-905b-0b4ac9932aa7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.951518] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1471.951833] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-051dee01-7bdf-43d5-810b-facad34c9274 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.954298] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1471.954486] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1471.955510] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa50cf7f-6cb4-45c8-8bdb-f9a9253b626a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.961850] env[61663]: DEBUG oslo_vmware.api [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Waiting for the task: (returnval){ [ 1471.961850] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529d4c73-4891-506d-933a-7d5675a9bd7b" [ 1471.961850] env[61663]: _type = "Task" [ 1471.961850] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.971585] env[61663]: DEBUG oslo_vmware.api [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529d4c73-4891-506d-933a-7d5675a9bd7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.145461] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1472.145725] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1472.145917] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Deleting the datastore file [datastore1] 1c36f4af-0222-48d3-ac90-776f7fe807de {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1472.146348] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c412c110-b055-4b3a-9f82-212e9bfdb98f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.152682] env[61663]: DEBUG oslo_vmware.api [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Waiting for the task: (returnval){ [ 1472.152682] env[61663]: value = "task-1690724" [ 1472.152682] env[61663]: _type = "Task" [ 1472.152682] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.161720] env[61663]: DEBUG oslo_vmware.api [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Task: {'id': task-1690724, 'name': DeleteDatastoreFile_Task} progress is 0%. 
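
[annotation] The Acquiring/acquired/released triples throughout this section (instance UUID locks, "compute_resources", the per-image cache lock) come from oslo.concurrency's named locks; the waited/held times in the log are measured around the context manager. A minimal sketch of the image-cache case, with the lock name copied from the entries above and the body an illustrative stand-in for the cache fill:

from oslo_concurrency import lockutils

# One named in-process lock serializes all work on a single cached VMDK,
# so only one request fetches/copies it at a time.
def fill_image_cache(fetch_tmp_sparse, copy_into_cache):
    with lockutils.lock(
            "[datastore1] devstack-image-cache_base/"
            "362c8152-fcd0-4f43-acbf-09a2dc376cb2/"
            "362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk"):
        fetch_tmp_sparse()   # download tmp-sparse.vmdk from the image service
        copy_into_cache()    # CopyVirtualDisk into the cache location
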
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.475834] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1472.475834] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Creating directory with path [datastore1] vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1472.475834] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c1830b0-2929-414b-ae79-b11127792763 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.487019] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Created directory with path [datastore1] vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1472.487019] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Fetch image to [datastore1] vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1472.487019] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1472.487019] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd7a0bd-7606-4389-ad43-33d447e78aaf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.493896] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1285fa85-aa7f-4c14-b358-64c0a35893a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.503852] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1c8afb9-f94e-46ad-bf07-593734015d61 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.542728] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47969f9d-4b85-4dc5-bd8f-71e63c3cfffb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.549522] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-66a0e94f-e8c1-4839-8027-88c3bc806576 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.576278] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1472.625149] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1472.698144] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1472.698144] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1472.704581] env[61663]: DEBUG oslo_vmware.api [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Task: {'id': task-1690724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080559} completed successfully. 
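
[annotation] The write handle above streams the image bytes straight to the ESX host's datastore file service: the /folder URL addresses a path inside a datastore, selected by the dcPath and dsName query parameters. A hedged sketch of the same transfer with requests; authentication is elided here, whereas the real code authorizes via the generic service ticket acquired above (SessionManager.AcquireGenericServiceTicket):

import requests

# Illustrative single HTTP PUT matching the rw_handles entries above.
url = ("https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/"
       "vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/"
       "362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk")
params = {"dcPath": "ha-datacenter", "dsName": "datastore1"}

def upload(image_stream, size):
    # size was 21318656 bytes (~20.3 MiB) in the transfer logged above
    resp = requests.put(url, params=params, data=image_stream,
                        headers={"Content-Length": str(size)})
    resp.raise_for_status()
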
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.704581] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1472.704581] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1472.704581] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1472.704581] env[61663]: INFO nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Took 0.76 seconds to destroy the instance on the hypervisor. [ 1472.706965] env[61663]: DEBUG nova.compute.claims [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1472.706965] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.706965] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.127832] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f3e3a4d9-40d7-46a6-984d-99ddae74823a tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] Acquiring lock "39380f25-15a5-4d8f-b38b-39e1b3561314" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.127832] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f3e3a4d9-40d7-46a6-984d-99ddae74823a tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] Lock "39380f25-15a5-4d8f-b38b-39e1b3561314" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.193595] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8167730e-c8e9-47d9-aee3-1cecada08f2d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.201721] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b983ca55-75d9-4ecb-99dd-e70f48bc6073 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.232158] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15467da0-3c35-4c8d-b0d8-a1060eba9477 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.239887] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d38d23-3a9f-4abb-a8b1-6a507df9d3be {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.254123] env[61663]: DEBUG nova.compute.provider_tree [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.263630] env[61663]: DEBUG nova.scheduler.client.report [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1473.281220] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.578s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.281790] env[61663]: ERROR nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1473.281790] env[61663]: Faults: ['InvalidArgument'] [ 1473.281790] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Traceback (most recent call last): [ 1473.281790] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1473.281790] env[61663]: 
ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] self.driver.spawn(context, instance, image_meta, [ 1473.281790] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1473.281790] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1473.281790] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1473.281790] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] self._fetch_image_if_missing(context, vi) [ 1473.281790] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1473.281790] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] image_cache(vi, tmp_image_ds_loc) [ 1473.281790] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] vm_util.copy_virtual_disk( [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] session._wait_for_task(vmdk_copy_task) [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] return self.wait_for_task(task_ref) [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] return evt.wait() [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] result = hub.switch() [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] return self.greenlet.switch() [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1473.282168] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] self.f(*self.args, **self.kw) [ 1473.282654] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1473.282654] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] raise exceptions.translate_fault(task_info.error) [ 1473.282654] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1473.282654] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Faults: ['InvalidArgument'] [ 1473.282654] env[61663]: ERROR nova.compute.manager [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] [ 1473.282654] env[61663]: DEBUG nova.compute.utils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1473.283969] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Build of instance 1c36f4af-0222-48d3-ac90-776f7fe807de was re-scheduled: A specified parameter was not correct: fileType [ 1473.283969] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1473.284354] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1473.284529] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1473.284689] env[61663]: DEBUG nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1473.284853] env[61663]: DEBUG nova.network.neutron [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1473.450075] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.450741] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.473693] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "536c3f6e-757d-4b59-bf82-c01f735746d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.474034] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "536c3f6e-757d-4b59-bf82-c01f735746d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.699018] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.699215] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1473.994873] env[61663]: DEBUG nova.network.neutron [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance:
1c36f4af-0222-48d3-ac90-776f7fe807de] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.014540] env[61663]: INFO nova.compute.manager [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Took 0.73 seconds to deallocate network for instance. [ 1474.138468] env[61663]: INFO nova.scheduler.client.report [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Deleted allocations for instance 1c36f4af-0222-48d3-ac90-776f7fe807de [ 1474.161179] env[61663]: DEBUG oslo_concurrency.lockutils [None req-62955f0e-9b7a-443d-8d57-330ae5cda766 tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "1c36f4af-0222-48d3-ac90-776f7fe807de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 242.611s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.162373] env[61663]: DEBUG oslo_concurrency.lockutils [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "1c36f4af-0222-48d3-ac90-776f7fe807de" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 43.713s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.162591] env[61663]: DEBUG oslo_concurrency.lockutils [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Acquiring lock "1c36f4af-0222-48d3-ac90-776f7fe807de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.162904] env[61663]: DEBUG oslo_concurrency.lockutils [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "1c36f4af-0222-48d3-ac90-776f7fe807de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.163016] env[61663]: DEBUG oslo_concurrency.lockutils [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "1c36f4af-0222-48d3-ac90-776f7fe807de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.166647] env[61663]: INFO nova.compute.manager [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Terminating instance [ 1474.169339] env[61663]: DEBUG nova.compute.manager [None req-383046b4-44f0-4925-84d6-231402c9ee3b
tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1474.169597] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1474.169874] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e99c108c-7fd2-4596-8063-39848fe9e434 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.175685] env[61663]: DEBUG nova.compute.manager [None req-eb0262fd-69a1-49a7-a7da-79f570b4a6ec tempest-ImagesNegativeTestJSON-285249914 tempest-ImagesNegativeTestJSON-285249914-project-member] [instance: fffb383d-e1db-4640-9201-0ea897c472d9] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1474.188147] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c51c849b-f835-436b-a8a6-8cbaef684352 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.217207] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1c36f4af-0222-48d3-ac90-776f7fe807de could not be found. [ 1474.217428] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1474.217636] env[61663]: INFO nova.compute.manager [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1474.217894] env[61663]: DEBUG oslo.service.loopingcall [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1474.218317] env[61663]: DEBUG nova.compute.manager [None req-eb0262fd-69a1-49a7-a7da-79f570b4a6ec tempest-ImagesNegativeTestJSON-285249914 tempest-ImagesNegativeTestJSON-285249914-project-member] [instance: fffb383d-e1db-4640-9201-0ea897c472d9] Instance disappeared before build.
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1474.219399] env[61663]: DEBUG nova.compute.manager [-] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1474.219504] env[61663]: DEBUG nova.network.neutron [-] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1474.241331] env[61663]: DEBUG oslo_concurrency.lockutils [None req-eb0262fd-69a1-49a7-a7da-79f570b4a6ec tempest-ImagesNegativeTestJSON-285249914 tempest-ImagesNegativeTestJSON-285249914-project-member] Lock "fffb383d-e1db-4640-9201-0ea897c472d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 214.111s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.251265] env[61663]: DEBUG nova.compute.manager [None req-6076379f-4c07-4d68-96ce-69c60b736ecc tempest-ServersWithSpecificFlavorTestJSON-1727622844 tempest-ServersWithSpecificFlavorTestJSON-1727622844-project-member] [instance: 4ef40272-deb2-414a-b0ac-0bc30ba2bc84] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1474.277408] env[61663]: DEBUG nova.compute.manager [None req-6076379f-4c07-4d68-96ce-69c60b736ecc tempest-ServersWithSpecificFlavorTestJSON-1727622844 tempest-ServersWithSpecificFlavorTestJSON-1727622844-project-member] [instance: 4ef40272-deb2-414a-b0ac-0bc30ba2bc84] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1474.289343] env[61663]: DEBUG nova.network.neutron [-] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.297119] env[61663]: INFO nova.compute.manager [-] [instance: 1c36f4af-0222-48d3-ac90-776f7fe807de] Took 0.08 seconds to deallocate network for instance. [ 1474.303932] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6076379f-4c07-4d68-96ce-69c60b736ecc tempest-ServersWithSpecificFlavorTestJSON-1727622844 tempest-ServersWithSpecificFlavorTestJSON-1727622844-project-member] Lock "4ef40272-deb2-414a-b0ac-0bc30ba2bc84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 207.374s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.313675] env[61663]: DEBUG nova.compute.manager [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: 879dce1e-340a-48d0-9291-857f39fec597] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1474.362499] env[61663]: DEBUG nova.compute.manager [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: 879dce1e-340a-48d0-9291-857f39fec597] Instance disappeared before build.
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1474.399202] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "879dce1e-340a-48d0-9291-857f39fec597" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 204.151s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.413270] env[61663]: DEBUG nova.compute.manager [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: f8dbebee-1ccc-4af8-a302-75e2f819161d] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1474.435713] env[61663]: DEBUG oslo_concurrency.lockutils [None req-383046b4-44f0-4925-84d6-231402c9ee3b tempest-ServerExternalEventsTest-1495957632 tempest-ServerExternalEventsTest-1495957632-project-member] Lock "1c36f4af-0222-48d3-ac90-776f7fe807de" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.273s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.441020] env[61663]: DEBUG nova.compute.manager [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: f8dbebee-1ccc-4af8-a302-75e2f819161d] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1474.464261] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d63d7339-275a-47da-b869-fd4577a50edb tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "f8dbebee-1ccc-4af8-a302-75e2f819161d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 204.178s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.472608] env[61663]: DEBUG nova.compute.manager [None req-baa02889-5d76-43b4-b99e-5a984d78f501 tempest-AttachInterfacesUnderV243Test-810853206 tempest-AttachInterfacesUnderV243Test-810853206-project-member] [instance: 10cb6a2e-4a08-453e-9372-4ea14958470d] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1474.497304] env[61663]: DEBUG nova.compute.manager [None req-baa02889-5d76-43b4-b99e-5a984d78f501 tempest-AttachInterfacesUnderV243Test-810853206 tempest-AttachInterfacesUnderV243Test-810853206-project-member] [instance: 10cb6a2e-4a08-453e-9372-4ea14958470d] Instance disappeared before build.
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1474.523066] env[61663]: DEBUG oslo_concurrency.lockutils [None req-baa02889-5d76-43b4-b99e-5a984d78f501 tempest-AttachInterfacesUnderV243Test-810853206 tempest-AttachInterfacesUnderV243Test-810853206-project-member] Lock "10cb6a2e-4a08-453e-9372-4ea14958470d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.276s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.534916] env[61663]: DEBUG nova.compute.manager [None req-152a630f-8837-4755-add1-2707b6f242c8 tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] [instance: 29c17291-50be-45a3-93c6-76bc8c7cbbb4] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1474.564878] env[61663]: DEBUG nova.compute.manager [None req-152a630f-8837-4755-add1-2707b6f242c8 tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] [instance: 29c17291-50be-45a3-93c6-76bc8c7cbbb4] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1474.594799] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152a630f-8837-4755-add1-2707b6f242c8 tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] Lock "29c17291-50be-45a3-93c6-76bc8c7cbbb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.459s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.608352] env[61663]: DEBUG nova.compute.manager [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Starting instance...
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1474.666029] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.666297] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.668523] env[61663]: INFO nova.compute.claims [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1474.835359] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Acquiring lock "fde11c71-3511-4fea-84c7-0e7de062951b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.208418] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336771d3-ef46-47ee-be65-6a8339ff0952 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.217991] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13fe39e9-3ce8-491a-8476-ff66c7ab715c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.252428] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36dc496a-46a8-460d-a0b4-660a9db64780 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.261348] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74ad83c-2b33-4ec7-823c-125459a51284 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.278419] env[61663]: DEBUG nova.compute.provider_tree [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.291121] env[61663]: DEBUG nova.scheduler.client.report [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590,
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1475.310335] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.644s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.310870] env[61663]: DEBUG nova.compute.manager [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1475.355753] env[61663]: DEBUG nova.compute.claims [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1475.356033] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.356230] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.692469] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.693492] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1475.693492] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1475.865235] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87241bb2-ad85-4807-a9c3-8831bdabd068 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.873443] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69391e9c-e6b0-4701-b71d-708668ca02b6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.911355] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddad6861-2591-42bc-989c-627e50961d7f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.919785] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2239a25-5e8f-4e76-b8c8-0d20c57b8b69 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.937259] env[61663]: DEBUG nova.compute.provider_tree [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.953228] env[61663]: DEBUG nova.scheduler.client.report [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1475.972977] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.615s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.972977] env[61663]: DEBUG nova.compute.utils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Conflict updating instance fde11c71-3511-4fea-84c7-0e7de062951b. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1475.974391] env[61663]: DEBUG nova.compute.manager [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Instance disappeared during build. 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 1475.974556] env[61663]: DEBUG nova.compute.manager [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1475.974798] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Acquiring lock "refresh_cache-fde11c71-3511-4fea-84c7-0e7de062951b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.977083] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Acquired lock "refresh_cache-fde11c71-3511-4fea-84c7-0e7de062951b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.977083] env[61663]: DEBUG nova.network.neutron [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1476.022547] env[61663]: DEBUG nova.network.neutron [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1476.273431] env[61663]: DEBUG nova.network.neutron [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.288645] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Releasing lock "refresh_cache-fde11c71-3511-4fea-84c7-0e7de062951b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.288645] env[61663]: DEBUG nova.compute.manager [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1476.288645] env[61663]: DEBUG nova.compute.manager [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1476.288818] env[61663]: DEBUG nova.network.neutron [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1476.318938] env[61663]: DEBUG nova.network.neutron [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1476.330600] env[61663]: DEBUG nova.network.neutron [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.339936] env[61663]: INFO nova.compute.manager [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Took 0.05 seconds to deallocate network for instance. 
[ 1476.418979] env[61663]: INFO nova.scheduler.client.report [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Deleted allocations for instance fde11c71-3511-4fea-84c7-0e7de062951b [ 1476.419307] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ade5cb7a-e705-4b95-a6d8-32e84224b5cb tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "fde11c71-3511-4fea-84c7-0e7de062951b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 199.847s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.420462] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "fde11c71-3511-4fea-84c7-0e7de062951b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 1.585s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.420679] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Acquiring lock "fde11c71-3511-4fea-84c7-0e7de062951b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.420920] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "fde11c71-3511-4fea-84c7-0e7de062951b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.421074] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "fde11c71-3511-4fea-84c7-0e7de062951b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.424062] env[61663]: INFO nova.compute.manager [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Terminating instance [ 1476.424780] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Acquiring lock "refresh_cache-fde11c71-3511-4fea-84c7-0e7de062951b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.424960] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Acquired lock "refresh_cache-fde11c71-3511-4fea-84c7-0e7de062951b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.425285]
env[61663]: DEBUG nova.network.neutron [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1476.434107] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1476.454960] env[61663]: DEBUG nova.network.neutron [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1476.484309] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.484621] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.487019] env[61663]: INFO nova.compute.claims [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1476.643238] env[61663]: DEBUG nova.network.neutron [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.654745] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Releasing lock "refresh_cache-fde11c71-3511-4fea-84c7-0e7de062951b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.655207] env[61663]: DEBUG nova.compute.manager [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1476.655429] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1476.658273] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce87e0c2-074e-4429-9c52-4b387cd51996 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.669742] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2f4b6f-8ceb-4fa4-8823-94850d63afd7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.702685] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fde11c71-3511-4fea-84c7-0e7de062951b could not be found. [ 1476.702988] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1476.703491] env[61663]: INFO nova.compute.manager [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1476.703491] env[61663]: DEBUG oslo.service.loopingcall [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.705932] env[61663]: DEBUG nova.compute.manager [-] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1476.706040] env[61663]: DEBUG nova.network.neutron [-] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1476.735975] env[61663]: DEBUG nova.network.neutron [-] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1476.748128] env[61663]: DEBUG nova.network.neutron [-] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.763405] env[61663]: INFO nova.compute.manager [-] [instance: fde11c71-3511-4fea-84c7-0e7de062951b] Took 0.06 seconds to deallocate network for instance.
[ 1476.907203] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0b08a63c-e52c-48f4-a295-13a2b592078f tempest-ServersTestJSON-1666340405 tempest-ServersTestJSON-1666340405-project-member] Lock "fde11c71-3511-4fea-84c7-0e7de062951b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.487s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.018018] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93028f64-983b-40d8-8037-7fa0da6aa1e3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.024303] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1a4168-9079-4a2d-a874-70f1155c03bf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.055923] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0471915f-bb92-4e31-bcf8-30c764fffc6d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.063285] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5d813a-010e-482c-bec1-b09e263d51ef {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.078363] env[61663]: DEBUG nova.compute.provider_tree [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.090019] env[61663]: DEBUG nova.scheduler.client.report [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1477.106621] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.622s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.107370] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Start building networks asynchronously for instance.
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1477.145021] env[61663]: DEBUG nova.compute.utils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1477.145021] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1477.145021] env[61663]: DEBUG nova.network.neutron [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1477.153730] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1477.222122] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1477.250019] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1477.250019] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1477.250019] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1477.250218] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1477.250218] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1477.250218] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1477.250409] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1477.250697] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1477.250995] env[61663]: DEBUG 
nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1477.251295] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1477.251603] env[61663]: DEBUG nova.virt.hardware [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1477.252547] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f300bb-3641-4791-ac98-f5a670016f8c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.261254] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f03d958-18a5-4179-8fd6-067f14bad367 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.315159] env[61663]: DEBUG nova.policy [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a902380f63e4cd5b44bfda0d6fa8d8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97d83beea6a043f3a37a2cfca793ba62', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1477.692707] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1478.001902] env[61663]: DEBUG nova.network.neutron [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Successfully created port: 5190aa07-adc9-4c4f-ad9c-90fa98cb4982 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1479.790878] env[61663]: DEBUG nova.network.neutron [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Successfully updated port: 5190aa07-adc9-4c4f-ad9c-90fa98cb4982 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1479.803072] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 
tempest-ServerAddressesTestJSON-255205572-project-member] Acquiring lock "refresh_cache-7a7a0ef0-bbea-42c0-b96e-4efc4207a655" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.803072] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquired lock "refresh_cache-7a7a0ef0-bbea-42c0-b96e-4efc4207a655" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.803072] env[61663]: DEBUG nova.network.neutron [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1479.875016] env[61663]: DEBUG nova.network.neutron [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1480.240322] env[61663]: DEBUG nova.network.neutron [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Updating instance_info_cache with network_info: [{"id": "5190aa07-adc9-4c4f-ad9c-90fa98cb4982", "address": "fa:16:3e:a6:c0:05", "network": {"id": "e7abb8ec-9a35-40da-8b5f-25fb6d3708ac", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1019619768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d83beea6a043f3a37a2cfca793ba62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5190aa07-ad", "ovs_interfaceid": "5190aa07-adc9-4c4f-ad9c-90fa98cb4982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.256635] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Releasing lock "refresh_cache-7a7a0ef0-bbea-42c0-b96e-4efc4207a655" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.256980] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 
7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Instance network_info: |[{"id": "5190aa07-adc9-4c4f-ad9c-90fa98cb4982", "address": "fa:16:3e:a6:c0:05", "network": {"id": "e7abb8ec-9a35-40da-8b5f-25fb6d3708ac", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1019619768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d83beea6a043f3a37a2cfca793ba62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5190aa07-ad", "ovs_interfaceid": "5190aa07-adc9-4c4f-ad9c-90fa98cb4982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1480.257411] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:c0:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5190aa07-adc9-4c4f-ad9c-90fa98cb4982', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1480.268911] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Creating folder: Project (97d83beea6a043f3a37a2cfca793ba62). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1480.269940] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-869ed69d-c25a-4586-887e-18bddba286b2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.282674] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Created folder: Project (97d83beea6a043f3a37a2cfca793ba62) in parent group-v352575. [ 1480.282674] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Creating folder: Instances. Parent ref: group-v352620. 
{{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1480.282674] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0958d924-520e-4dcb-9896-f90d10f84b1e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.292557] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Created folder: Instances in parent group-v352620. [ 1480.292875] env[61663]: DEBUG oslo.service.loopingcall [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.293541] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1480.293774] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ed20ff7-8eb1-4eee-9f90-75ce33ccde5f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.320454] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1480.320454] env[61663]: value = "task-1690727" [ 1480.320454] env[61663]: _type = "Task" [ 1480.320454] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.328935] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690727, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.802079] env[61663]: DEBUG nova.compute.manager [req-a5508550-6450-4990-8c3a-0c8f64976a51 req-6d5a5366-53ea-46fe-95b7-386cda0ac978 service nova] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Received event network-vif-plugged-5190aa07-adc9-4c4f-ad9c-90fa98cb4982 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1480.802384] env[61663]: DEBUG oslo_concurrency.lockutils [req-a5508550-6450-4990-8c3a-0c8f64976a51 req-6d5a5366-53ea-46fe-95b7-386cda0ac978 service nova] Acquiring lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.804554] env[61663]: DEBUG oslo_concurrency.lockutils [req-a5508550-6450-4990-8c3a-0c8f64976a51 req-6d5a5366-53ea-46fe-95b7-386cda0ac978 service nova] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.002s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.805417] env[61663]: DEBUG oslo_concurrency.lockutils [req-a5508550-6450-4990-8c3a-0c8f64976a51 req-6d5a5366-53ea-46fe-95b7-386cda0ac978 service nova] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.805641] env[61663]: DEBUG nova.compute.manager [req-a5508550-6450-4990-8c3a-0c8f64976a51 req-6d5a5366-53ea-46fe-95b7-386cda0ac978 service nova] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] No waiting events found dispatching network-vif-plugged-5190aa07-adc9-4c4f-ad9c-90fa98cb4982 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1480.805831] env[61663]: WARNING nova.compute.manager [req-a5508550-6450-4990-8c3a-0c8f64976a51 req-6d5a5366-53ea-46fe-95b7-386cda0ac978 service nova] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Received unexpected event network-vif-plugged-5190aa07-adc9-4c4f-ad9c-90fa98cb4982 for instance with vm_state building and task_state spawning. [ 1480.837909] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690727, 'name': CreateVM_Task, 'duration_secs': 0.304677} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.837909] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1480.837909] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.837909] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.837909] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1480.838124] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca3f32da-6b9f-4ded-a6b2-3578eca88aa4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.843957] env[61663]: DEBUG oslo_vmware.api [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Waiting for the task: (returnval){ [ 1480.843957] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5209307e-aa26-2ac2-0a2f-74449a7627d7" [ 1480.843957] env[61663]: _type = "Task" [ 1480.843957] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.854018] env[61663]: DEBUG oslo_vmware.api [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5209307e-aa26-2ac2-0a2f-74449a7627d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.355031] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.355031] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1481.355031] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.849847] env[61663]: DEBUG oslo_concurrency.lockutils [None req-41a13fac-5bcd-4bea-bb2f-ba21b899d5fc tempest-ServerRescueTestJSON-566576593 tempest-ServerRescueTestJSON-566576593-project-member] Acquiring lock "058b6485-898e-4799-899a-df5297144271" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.850231] env[61663]: DEBUG oslo_concurrency.lockutils [None req-41a13fac-5bcd-4bea-bb2f-ba21b899d5fc tempest-ServerRescueTestJSON-566576593 tempest-ServerRescueTestJSON-566576593-project-member] Lock "058b6485-898e-4799-899a-df5297144271" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.110645] env[61663]: DEBUG nova.compute.manager [req-66f60708-9c60-43c4-87b9-c57323bd65e4 req-bb4704ba-8c7b-4d12-86c3-778ad450317f service nova] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Received event network-changed-5190aa07-adc9-4c4f-ad9c-90fa98cb4982 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1483.110918] env[61663]: DEBUG nova.compute.manager [req-66f60708-9c60-43c4-87b9-c57323bd65e4 req-bb4704ba-8c7b-4d12-86c3-778ad450317f service nova] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Refreshing instance network info cache due to event network-changed-5190aa07-adc9-4c4f-ad9c-90fa98cb4982. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1483.111164] env[61663]: DEBUG oslo_concurrency.lockutils [req-66f60708-9c60-43c4-87b9-c57323bd65e4 req-bb4704ba-8c7b-4d12-86c3-778ad450317f service nova] Acquiring lock "refresh_cache-7a7a0ef0-bbea-42c0-b96e-4efc4207a655" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.111360] env[61663]: DEBUG oslo_concurrency.lockutils [req-66f60708-9c60-43c4-87b9-c57323bd65e4 req-bb4704ba-8c7b-4d12-86c3-778ad450317f service nova] Acquired lock "refresh_cache-7a7a0ef0-bbea-42c0-b96e-4efc4207a655" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.111565] env[61663]: DEBUG nova.network.neutron [req-66f60708-9c60-43c4-87b9-c57323bd65e4 req-bb4704ba-8c7b-4d12-86c3-778ad450317f service nova] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Refreshing network info cache for port 5190aa07-adc9-4c4f-ad9c-90fa98cb4982 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1483.117306] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquiring lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.801453] env[61663]: DEBUG nova.network.neutron [req-66f60708-9c60-43c4-87b9-c57323bd65e4 req-bb4704ba-8c7b-4d12-86c3-778ad450317f service nova] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Updated VIF entry in instance network info cache for port 5190aa07-adc9-4c4f-ad9c-90fa98cb4982. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1483.801453] env[61663]: DEBUG nova.network.neutron [req-66f60708-9c60-43c4-87b9-c57323bd65e4 req-bb4704ba-8c7b-4d12-86c3-778ad450317f service nova] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Updating instance_info_cache with network_info: [{"id": "5190aa07-adc9-4c4f-ad9c-90fa98cb4982", "address": "fa:16:3e:a6:c0:05", "network": {"id": "e7abb8ec-9a35-40da-8b5f-25fb6d3708ac", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1019619768-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97d83beea6a043f3a37a2cfca793ba62", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5190aa07-ad", "ovs_interfaceid": "5190aa07-adc9-4c4f-ad9c-90fa98cb4982", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.812521] env[61663]: DEBUG oslo_concurrency.lockutils [req-66f60708-9c60-43c4-87b9-c57323bd65e4 req-bb4704ba-8c7b-4d12-86c3-778ad450317f service nova] Releasing lock "refresh_cache-7a7a0ef0-bbea-42c0-b96e-4efc4207a655" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.128093] env[61663]: DEBUG oslo_concurrency.lockutils [None req-fbdf756e-7265-49a3-8c71-b31b45a20dec tempest-ServerShowV254Test-647196749 tempest-ServerShowV254Test-647196749-project-member] Acquiring lock "f2a87111-4361-4e0a-940c-3c163c2d5e72" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.128375] env[61663]: DEBUG oslo_concurrency.lockutils [None req-fbdf756e-7265-49a3-8c71-b31b45a20dec tempest-ServerShowV254Test-647196749 tempest-ServerShowV254Test-647196749-project-member] Lock "f2a87111-4361-4e0a-940c-3c163c2d5e72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.604613] env[61663]: DEBUG oslo_concurrency.lockutils [None req-338995db-ebbb-459a-aa1b-ea3a6dd7047e tempest-ImagesOneServerNegativeTestJSON-224650719 tempest-ImagesOneServerNegativeTestJSON-224650719-project-member] Acquiring lock "26e22311-811c-49cf-b2df-40822f2e4f3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.605021] env[61663]: DEBUG oslo_concurrency.lockutils [None req-338995db-ebbb-459a-aa1b-ea3a6dd7047e tempest-ImagesOneServerNegativeTestJSON-224650719 
tempest-ImagesOneServerNegativeTestJSON-224650719-project-member] Lock "26e22311-811c-49cf-b2df-40822f2e4f3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.262983] env[61663]: DEBUG oslo_concurrency.lockutils [None req-43750839-8991-4f82-9666-8097bb755029 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "f71fce22-f27b-4e5d-94e4-697d09377ed1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.262983] env[61663]: DEBUG oslo_concurrency.lockutils [None req-43750839-8991-4f82-9666-8097bb755029 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "f71fce22-f27b-4e5d-94e4-697d09377ed1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.014593] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cdd6fb62-2ec7-424d-9d2c-2c11b1adfc86 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] Acquiring lock "06030fd6-0e35-42dc-bd66-cfc95930e90a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.014593] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cdd6fb62-2ec7-424d-9d2c-2c11b1adfc86 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] Lock "06030fd6-0e35-42dc-bd66-cfc95930e90a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1502.383654] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f6b9016b-d1cd-4ad5-921e-011dc3842892 tempest-ServersNegativeTestMultiTenantJSON-1168191994 tempest-ServersNegativeTestMultiTenantJSON-1168191994-project-member] Acquiring lock "65cdd238-4875-4dad-9df0-0aeda65ab9ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.385286] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f6b9016b-d1cd-4ad5-921e-011dc3842892 tempest-ServersNegativeTestMultiTenantJSON-1168191994 tempest-ServersNegativeTestMultiTenantJSON-1168191994-project-member] Lock "65cdd238-4875-4dad-9df0-0aeda65ab9ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.637374] env[61663]: DEBUG oslo_concurrency.lockutils [None req-202f74b1-1b62-4490-95f7-f625f2b42e2b tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "f3d817b9-1a93-4fb8-b25c-756de9152f17" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.637678] env[61663]: DEBUG oslo_concurrency.lockutils [None req-202f74b1-1b62-4490-95f7-f625f2b42e2b tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "f3d817b9-1a93-4fb8-b25c-756de9152f17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.718841] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c3f9183e-aee5-439a-8d3f-77031544ed74 tempest-InstanceActionsV221TestJSON-1618392584 tempest-InstanceActionsV221TestJSON-1618392584-project-member] Acquiring lock "5c76183a-cdcb-49e7-95b9-75a635352479" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.719208] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c3f9183e-aee5-439a-8d3f-77031544ed74 tempest-InstanceActionsV221TestJSON-1618392584 tempest-InstanceActionsV221TestJSON-1618392584-project-member] Lock "5c76183a-cdcb-49e7-95b9-75a635352479" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1518.504148] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cbc7f19f-62ce-4589-b0f4-9f499af3a6d5 tempest-ServerMetadataTestJSON-50623780 tempest-ServerMetadataTestJSON-50623780-project-member] Acquiring lock "14682ec1-2d3f-4601-a48e-832e7f2072d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1518.504430] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cbc7f19f-62ce-4589-b0f4-9f499af3a6d5 tempest-ServerMetadataTestJSON-50623780 tempest-ServerMetadataTestJSON-50623780-project-member] Lock "14682ec1-2d3f-4601-a48e-832e7f2072d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.155180] env[61663]: WARNING oslo_vmware.rw_handles [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", 
line 318, in begin [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1519.155180] env[61663]: ERROR oslo_vmware.rw_handles [ 1519.155673] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1519.157734] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1519.158050] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Copying Virtual Disk [datastore1] vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/35282756-0032-412d-8d0c-de5b17c01fa1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1519.158347] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f05713f-9c1e-4afc-bac5-c0eda7b01bd6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.166622] env[61663]: DEBUG oslo_vmware.api [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Waiting for the task: (returnval){ [ 1519.166622] env[61663]: value = "task-1690728" [ 1519.166622] env[61663]: _type = "Task" [ 1519.166622] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.174256] env[61663]: DEBUG oslo_vmware.api [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Task: {'id': task-1690728, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.677077] env[61663]: DEBUG oslo_vmware.exceptions [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1519.677361] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.678063] env[61663]: ERROR nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1519.678063] env[61663]: Faults: ['InvalidArgument'] [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Traceback (most recent call last): [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] yield resources [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] self.driver.spawn(context, instance, image_meta, [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] self._fetch_image_if_missing(context, vi) [ 1519.678063] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] image_cache(vi, tmp_image_ds_loc) [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] vm_util.copy_virtual_disk( [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] session._wait_for_task(vmdk_copy_task) [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] return self.wait_for_task(task_ref) [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] return evt.wait() [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] result = hub.switch() [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1519.678456] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] return self.greenlet.switch() [ 1519.678804] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1519.678804] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] self.f(*self.args, **self.kw) [ 1519.678804] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1519.678804] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] raise exceptions.translate_fault(task_info.error) [ 1519.678804] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1519.678804] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Faults: ['InvalidArgument'] [ 1519.678804] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] [ 1519.678804] env[61663]: INFO nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Terminating instance [ 1519.679989] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.680213] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1519.680839] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 
tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1519.681043] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1519.681267] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-658f8b20-ba24-4d3d-acb3-65d1bdf1c2a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.683525] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f616123-2b54-4acd-8e36-cbb9195890ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.690862] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1519.691115] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecc2a64a-7d65-47d5-91be-b7f224a36a8a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.693442] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1519.693615] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1519.694544] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2d592eb-9d3b-4a25-94c7-56a0d2a1685b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.699181] env[61663]: DEBUG oslo_vmware.api [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Waiting for the task: (returnval){ [ 1519.699181] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525e0897-7e67-c7c2-2b72-fd969ba4b0bb" [ 1519.699181] env[61663]: _type = "Task" [ 1519.699181] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.708360] env[61663]: DEBUG oslo_vmware.api [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525e0897-7e67-c7c2-2b72-fd969ba4b0bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.209458] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1520.209729] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Creating directory with path [datastore1] vmware_temp/557c103b-b29e-4b9e-af39-fb62929b9d10/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1520.209962] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a537eff-2483-48ee-a7af-d676e516f1e3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.675593] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Created directory with path [datastore1] vmware_temp/557c103b-b29e-4b9e-af39-fb62929b9d10/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1520.675846] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Fetch image to [datastore1] vmware_temp/557c103b-b29e-4b9e-af39-fb62929b9d10/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1520.676054] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/557c103b-b29e-4b9e-af39-fb62929b9d10/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1520.676852] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb17991-27c6-4aeb-af09-ea678c73739e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.683967] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe70647-c53f-480a-ba3a-c32c84d087de {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1520.693460] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bd2f63-6c1a-4158-bcb0-829bb1c579b4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.723429] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f09257-32ab-4d9d-abf9-8929e496aad2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.729101] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1e6558c2-5d91-4538-bac6-66adab762f51 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.762156] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1520.818103] env[61663]: DEBUG oslo_vmware.rw_handles [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/557c103b-b29e-4b9e-af39-fb62929b9d10/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1520.879596] env[61663]: DEBUG oslo_vmware.rw_handles [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1520.879596] env[61663]: DEBUG oslo_vmware.rw_handles [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/557c103b-b29e-4b9e-af39-fb62929b9d10/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1521.471992] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1521.472247] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1521.472453] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Deleting the datastore file [datastore1] 5e748c4b-03c5-4a88-a4ed-27093f2aef47 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1521.472727] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af222d5a-a31a-417c-84b0-b2239201f58e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.479051] env[61663]: DEBUG oslo_vmware.api [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Waiting for the task: (returnval){ [ 1521.479051] env[61663]: value = "task-1690730" [ 1521.479051] env[61663]: _type = "Task" [ 1521.479051] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.487051] env[61663]: DEBUG oslo_vmware.api [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Task: {'id': task-1690730, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.988995] env[61663]: DEBUG oslo_vmware.api [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Task: {'id': task-1690730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082636} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.990957] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1521.990957] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1521.990957] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1521.990957] env[61663]: INFO nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Took 2.31 seconds to destroy the instance on the hypervisor. [ 1521.995027] env[61663]: DEBUG nova.compute.claims [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1521.995319] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.995609] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.334522] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212b2caf-52b5-4020-9155-0f49563a9b18 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.343458] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64b59f9-f9ff-4286-b6c2-25775c8dbe02 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.371962] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7264cca2-f507-4ab1-9845-e0088701f287 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.378550] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-663da22d-f5ac-4c02-88a4-c03ce38f4a70 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.390953] env[61663]: DEBUG nova.compute.provider_tree [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1522.399704] env[61663]: DEBUG nova.scheduler.client.report [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1522.418189] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.422s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.418692] env[61663]: ERROR nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1522.418692] env[61663]: Faults: ['InvalidArgument'] [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Traceback (most recent call last): [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] self.driver.spawn(context, instance, image_meta, [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] 
self._fetch_image_if_missing(context, vi) [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] image_cache(vi, tmp_image_ds_loc) [ 1522.418692] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] vm_util.copy_virtual_disk( [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] session._wait_for_task(vmdk_copy_task) [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] return self.wait_for_task(task_ref) [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] return evt.wait() [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] result = hub.switch() [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] return self.greenlet.switch() [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1522.419102] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] self.f(*self.args, **self.kw) [ 1522.419509] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1522.419509] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] raise exceptions.translate_fault(task_info.error) [ 1522.419509] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1522.419509] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Faults: ['InvalidArgument'] [ 1522.419509] env[61663]: ERROR nova.compute.manager [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] [ 1522.419509] env[61663]: DEBUG nova.compute.utils [None 
req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1522.420721] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Build of instance 5e748c4b-03c5-4a88-a4ed-27093f2aef47 was re-scheduled: A specified parameter was not correct: fileType [ 1522.420721] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1522.421109] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1522.421287] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1522.421460] env[61663]: DEBUG nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1522.421627] env[61663]: DEBUG nova.network.neutron [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1522.780023] env[61663]: DEBUG nova.network.neutron [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.797129] env[61663]: INFO nova.compute.manager [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Took 0.37 seconds to deallocate network for instance. 
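The failed-build sequence above is driven by oslo.vmware's task polling: wait_for_task (api.py:397) spins up a looping call into _poll_task, which reads the Task's info, emits the "Task: {...} progress is N%." DEBUG lines, and on an error state raises exceptions.translate_fault(task_info.error) -- the VimFaultException("A specified parameter was not correct: fileType") in the traceback. Below is a minimal Python sketch of that loop, not the library's actual code; get_task_info() is a hypothetical stand-in for the PropertyCollector read oslo.vmware performs against the Task managed object.

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, faults, message):
            super().__init__(message)
            self.fault_list = faults

    def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
        """Poll a vCenter Task until it leaves the queued/running states."""
        while True:
            info = get_task_info(task_ref)  # hypothetical property read
            state = info["state"]
            if state in ("queued", "running"):
                # Matches the "progress is N%." DEBUG lines in the log.
                print(f"Task: {task_ref} progress is {info.get('progress', 0)}%.")
                time.sleep(poll_interval)
            elif state == "success":
                return info
            else:
                # Error state: surface the VIM fault, as with the
                # "A specified parameter was not correct: fileType" failure.
                err = info["error"]
                raise VimFaultException(err.get("faults", []), err["message"])

When the exception propagates, _build_and_run_instance aborts the resource claim under the "compute_resources" lock and re-schedules the instance, which is exactly the sequence logged above.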
[ 1522.919877] env[61663]: INFO nova.scheduler.client.report [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Deleted allocations for instance 5e748c4b-03c5-4a88-a4ed-27093f2aef47 [ 1522.950758] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e40e3fe-ed35-48d9-b9dc-9d13aa3496a2 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 288.841s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.952029] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 90.305s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.952274] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Acquiring lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.952491] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.952715] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.954915] env[61663]: INFO nova.compute.manager [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Terminating instance [ 1522.956799] env[61663]: DEBUG nova.compute.manager [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1522.956977] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1522.957833] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59d3f001-e31c-4f5b-b2cf-46d160afa9d8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.967948] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbcb8a1b-7f4c-482f-a229-1617ca4a34b2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.983917] env[61663]: DEBUG nova.compute.manager [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] [instance: 71250f0d-8e81-444b-bed7-792f229a19a5] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.001096] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5e748c4b-03c5-4a88-a4ed-27093f2aef47 could not be found. [ 1523.001390] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1523.001845] env[61663]: INFO nova.compute.manager [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1523.001943] env[61663]: DEBUG oslo.service.loopingcall [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1523.002178] env[61663]: DEBUG nova.compute.manager [-] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1523.002317] env[61663]: DEBUG nova.network.neutron [-] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1523.019481] env[61663]: DEBUG nova.compute.manager [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] [instance: 71250f0d-8e81-444b-bed7-792f229a19a5] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.037109] env[61663]: DEBUG nova.network.neutron [-] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.046758] env[61663]: INFO nova.compute.manager [-] [instance: 5e748c4b-03c5-4a88-a4ed-27093f2aef47] Took 0.04 seconds to deallocate network for instance. [ 1523.053668] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] Lock "71250f0d-8e81-444b-bed7-792f229a19a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.527s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.069564] env[61663]: DEBUG nova.compute.manager [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] [instance: 6c3af584-cfad-4645-8246-d0366ddb8775] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.118540] env[61663]: DEBUG nova.compute.manager [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] [instance: 6c3af584-cfad-4645-8246-d0366ddb8775] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.141460] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] Lock "6c3af584-cfad-4645-8246-d0366ddb8775" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.574s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.152519] env[61663]: DEBUG nova.compute.manager [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] [instance: 55e8066d-cb8f-4731-b5bd-57adfebae81a] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.188902] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5e4074de-db31-4601-ae78-a3f9fd68c304 tempest-InstanceActionsNegativeTestJSON-269499218 tempest-InstanceActionsNegativeTestJSON-269499218-project-member] Lock "5e748c4b-03c5-4a88-a4ed-27093f2aef47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.234s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.188902] env[61663]: DEBUG nova.compute.manager [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] [instance: 55e8066d-cb8f-4731-b5bd-57adfebae81a] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.212595] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e4d76654-45e2-4b2a-8895-7c5c4d35a945 tempest-ListServersNegativeTestJSON-359424066 tempest-ListServersNegativeTestJSON-359424066-project-member] Lock "55e8066d-cb8f-4731-b5bd-57adfebae81a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.609s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.221822] env[61663]: DEBUG nova.compute.manager [None req-aaf9ffc7-6ead-4884-aabb-1c4d0fcba52f tempest-AttachInterfacesV270Test-1092600010 tempest-AttachInterfacesV270Test-1092600010-project-member] [instance: e56bdc9e-964f-4994-be20-9a981095f813] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.246400] env[61663]: DEBUG nova.compute.manager [None req-aaf9ffc7-6ead-4884-aabb-1c4d0fcba52f tempest-AttachInterfacesV270Test-1092600010 tempest-AttachInterfacesV270Test-1092600010-project-member] [instance: e56bdc9e-964f-4994-be20-9a981095f813] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.296794] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aaf9ffc7-6ead-4884-aabb-1c4d0fcba52f tempest-AttachInterfacesV270Test-1092600010 tempest-AttachInterfacesV270Test-1092600010-project-member] Lock "e56bdc9e-964f-4994-be20-9a981095f813" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.179s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.310416] env[61663]: DEBUG nova.compute.manager [None req-883e33b0-fd45-4945-b405-76ea88b6cbc8 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] [instance: 5d2229e3-08b2-432b-98b5-95cc9f6e649f] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.341200] env[61663]: DEBUG nova.compute.manager [None req-883e33b0-fd45-4945-b405-76ea88b6cbc8 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] [instance: 5d2229e3-08b2-432b-98b5-95cc9f6e649f] Instance disappeared before build. 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.364848] env[61663]: DEBUG oslo_concurrency.lockutils [None req-883e33b0-fd45-4945-b405-76ea88b6cbc8 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] Lock "5d2229e3-08b2-432b-98b5-95cc9f6e649f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.561s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.380580] env[61663]: DEBUG nova.compute.manager [None req-3cc7ddf2-952f-4ac2-ab15-d4b41d10ef52 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] [instance: 0c42630f-9e39-4eeb-aa56-d953cff6b4a9] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.403929] env[61663]: DEBUG nova.compute.manager [None req-3cc7ddf2-952f-4ac2-ab15-d4b41d10ef52 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] [instance: 0c42630f-9e39-4eeb-aa56-d953cff6b4a9] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.430166] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3cc7ddf2-952f-4ac2-ab15-d4b41d10ef52 tempest-ListImageFiltersTestJSON-496746134 tempest-ListImageFiltersTestJSON-496746134-project-member] Lock "0c42630f-9e39-4eeb-aa56-d953cff6b4a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.124s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.442427] env[61663]: DEBUG nova.compute.manager [None req-aa3ce05a-c267-4627-874a-f41a24704db2 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 562e2e30-99d6-4edd-8382-e2e765c6a449] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.470108] env[61663]: DEBUG nova.compute.manager [None req-aa3ce05a-c267-4627-874a-f41a24704db2 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 562e2e30-99d6-4edd-8382-e2e765c6a449] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.498039] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aa3ce05a-c267-4627-874a-f41a24704db2 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "562e2e30-99d6-4edd-8382-e2e765c6a449" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 233.413s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.513728] env[61663]: DEBUG nova.compute.manager [None req-16401c01-01df-4ef5-aac0-9dbc372bcfd3 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] [instance: ffe47fec-5f84-4a9a-a103-e59a90201064] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.543203] env[61663]: DEBUG nova.compute.manager [None req-16401c01-01df-4ef5-aac0-9dbc372bcfd3 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] [instance: ffe47fec-5f84-4a9a-a103-e59a90201064] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.566658] env[61663]: DEBUG oslo_concurrency.lockutils [None req-16401c01-01df-4ef5-aac0-9dbc372bcfd3 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] Lock "ffe47fec-5f84-4a9a-a103-e59a90201064" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.361s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.577429] env[61663]: DEBUG nova.compute.manager [None req-4980148b-bb2b-4f25-8e8c-0c40171d8aa7 tempest-ServerPasswordTestJSON-28149881 tempest-ServerPasswordTestJSON-28149881-project-member] [instance: 25222911-beaf-4f80-be5e-a6decd09958d] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.602613] env[61663]: DEBUG nova.compute.manager [None req-4980148b-bb2b-4f25-8e8c-0c40171d8aa7 tempest-ServerPasswordTestJSON-28149881 tempest-ServerPasswordTestJSON-28149881-project-member] [instance: 25222911-beaf-4f80-be5e-a6decd09958d] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.624498] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4980148b-bb2b-4f25-8e8c-0c40171d8aa7 tempest-ServerPasswordTestJSON-28149881 tempest-ServerPasswordTestJSON-28149881-project-member] Lock "25222911-beaf-4f80-be5e-a6decd09958d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.528s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.633898] env[61663]: DEBUG nova.compute.manager [None req-c32361a0-91fc-428e-bd66-782734a43df3 tempest-ServerDiagnosticsV248Test-560508285 tempest-ServerDiagnosticsV248Test-560508285-project-member] [instance: 4e94548a-c81b-46c1-886b-bb5e2b8ebf9d] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.660718] env[61663]: DEBUG nova.compute.manager [None req-c32361a0-91fc-428e-bd66-782734a43df3 tempest-ServerDiagnosticsV248Test-560508285 tempest-ServerDiagnosticsV248Test-560508285-project-member] [instance: 4e94548a-c81b-46c1-886b-bb5e2b8ebf9d] Instance disappeared before build. 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.683499] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c32361a0-91fc-428e-bd66-782734a43df3 tempest-ServerDiagnosticsV248Test-560508285 tempest-ServerDiagnosticsV248Test-560508285-project-member] Lock "4e94548a-c81b-46c1-886b-bb5e2b8ebf9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.363s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.693119] env[61663]: DEBUG nova.compute.manager [None req-f39f149a-5cc6-43d7-8bc6-629db9bdfd8f tempest-ServersTestManualDisk-1712154969 tempest-ServersTestManualDisk-1712154969-project-member] [instance: 9f599f13-3906-4766-a1a5-a324da916370] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.719664] env[61663]: DEBUG nova.compute.manager [None req-f39f149a-5cc6-43d7-8bc6-629db9bdfd8f tempest-ServersTestManualDisk-1712154969 tempest-ServersTestManualDisk-1712154969-project-member] [instance: 9f599f13-3906-4766-a1a5-a324da916370] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.742913] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f39f149a-5cc6-43d7-8bc6-629db9bdfd8f tempest-ServersTestManualDisk-1712154969 tempest-ServersTestManualDisk-1712154969-project-member] Lock "9f599f13-3906-4766-a1a5-a324da916370" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.228s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.753053] env[61663]: DEBUG nova.compute.manager [None req-39dc81b8-87d1-4c46-bc00-4d97ad4903e7 tempest-ServersTestBootFromVolume-1644706521 tempest-ServersTestBootFromVolume-1644706521-project-member] [instance: 5def97c4-7d72-4ade-bb17-160e91f67f75] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.783905] env[61663]: DEBUG nova.compute.manager [None req-39dc81b8-87d1-4c46-bc00-4d97ad4903e7 tempest-ServersTestBootFromVolume-1644706521 tempest-ServersTestBootFromVolume-1644706521-project-member] [instance: 5def97c4-7d72-4ade-bb17-160e91f67f75] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1523.807778] env[61663]: DEBUG oslo_concurrency.lockutils [None req-39dc81b8-87d1-4c46-bc00-4d97ad4903e7 tempest-ServersTestBootFromVolume-1644706521 tempest-ServersTestBootFromVolume-1644706521-project-member] Lock "5def97c4-7d72-4ade-bb17-160e91f67f75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.286s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.820839] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1523.879590] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.879590] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.880698] env[61663]: INFO nova.compute.claims [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.308816] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e85466c-8dec-476b-9e0b-69d9fedede27 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.321778] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b962a70a-7286-4ae0-be04-83ebd89275cf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.354516] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a06aca-0a41-48ca-bcee-1e5467cd4540 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.362948] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb6a4f3-8be8-4e84-a281-7bb2b66483e0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.389615] env[61663]: DEBUG nova.compute.provider_tree [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.403970] env[61663]: DEBUG nova.scheduler.client.report [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1524.462032] env[61663]: DEBUG oslo_concurrency.lockutils 
[None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.584s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.464269] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1524.522267] env[61663]: DEBUG nova.compute.utils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1524.525708] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1524.525708] env[61663]: DEBUG nova.network.neutron [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1524.536195] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1524.603823] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1524.638493] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1524.638720] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1524.638872] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1524.639077] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1524.639507] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1524.639667] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1524.639885] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1524.640058] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1524.640230] env[61663]: DEBUG 
nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1524.640394] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1524.644254] env[61663]: DEBUG nova.virt.hardware [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1524.644254] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d71863-a468-4950-b7c6-95ef5713e576 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.651994] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f400d45-482b-4fe5-b869-3c3684a6c5bf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.668580] env[61663]: DEBUG nova.policy [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '670d4550ac924a3787cbf6dfe1bf36b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba20bc7d580b4d7fa87a7b2fcc0c51d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1525.884091] env[61663]: DEBUG nova.network.neutron [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Successfully created port: 7de7142b-3745-4c3f-9b95-78eb2feb1252 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1527.174856] env[61663]: DEBUG nova.network.neutron [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Successfully updated port: 7de7142b-3745-4c3f-9b95-78eb2feb1252 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1527.188955] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "refresh_cache-1305216b-0ee5-499a-a82a-30b45a8c832c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.189178] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquired lock "refresh_cache-1305216b-0ee5-499a-a82a-30b45a8c832c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.189346] env[61663]: DEBUG nova.network.neutron [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1527.204910] env[61663]: DEBUG nova.compute.manager [req-f1dd7f28-81d3-4fd2-bad3-fe1a6ef9d736 req-6cd15565-2cbc-407e-866f-c377bde33769 service nova] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Received event network-vif-plugged-7de7142b-3745-4c3f-9b95-78eb2feb1252 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1527.205172] env[61663]: DEBUG oslo_concurrency.lockutils [req-f1dd7f28-81d3-4fd2-bad3-fe1a6ef9d736 req-6cd15565-2cbc-407e-866f-c377bde33769 service nova] Acquiring lock "1305216b-0ee5-499a-a82a-30b45a8c832c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.205483] env[61663]: DEBUG oslo_concurrency.lockutils [req-f1dd7f28-81d3-4fd2-bad3-fe1a6ef9d736 req-6cd15565-2cbc-407e-866f-c377bde33769 service nova] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.205682] env[61663]: DEBUG oslo_concurrency.lockutils [req-f1dd7f28-81d3-4fd2-bad3-fe1a6ef9d736 req-6cd15565-2cbc-407e-866f-c377bde33769 service nova] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.205869] env[61663]: DEBUG nova.compute.manager [req-f1dd7f28-81d3-4fd2-bad3-fe1a6ef9d736 req-6cd15565-2cbc-407e-866f-c377bde33769 service nova] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] No waiting events found dispatching network-vif-plugged-7de7142b-3745-4c3f-9b95-78eb2feb1252 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1527.206064] env[61663]: WARNING nova.compute.manager [req-f1dd7f28-81d3-4fd2-bad3-fe1a6ef9d736 req-6cd15565-2cbc-407e-866f-c377bde33769 service nova] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Received unexpected event network-vif-plugged-7de7142b-3745-4c3f-9b95-78eb2feb1252 for instance with vm_state building and task_state spawning. [ 1527.385924] env[61663]: DEBUG nova.network.neutron [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1527.912951] env[61663]: DEBUG nova.network.neutron [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Updating instance_info_cache with network_info: [{"id": "7de7142b-3745-4c3f-9b95-78eb2feb1252", "address": "fa:16:3e:4d:14:a0", "network": {"id": "4881720d-78ec-4fa5-ac05-915e1d68e0e0", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-829948714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba20bc7d580b4d7fa87a7b2fcc0c51d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7de7142b-37", "ovs_interfaceid": "7de7142b-3745-4c3f-9b95-78eb2feb1252", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1527.928039] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Releasing lock "refresh_cache-1305216b-0ee5-499a-a82a-30b45a8c832c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.928493] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Instance network_info: |[{"id": "7de7142b-3745-4c3f-9b95-78eb2feb1252", "address": "fa:16:3e:4d:14:a0", "network": {"id": "4881720d-78ec-4fa5-ac05-915e1d68e0e0", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-829948714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba20bc7d580b4d7fa87a7b2fcc0c51d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7de7142b-37", "ovs_interfaceid": "7de7142b-3745-4c3f-9b95-78eb2feb1252", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1527.928840] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:14:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8145bd31-c4a7-4828-8818-d065010c9565', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7de7142b-3745-4c3f-9b95-78eb2feb1252', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1527.937129] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Creating folder: Project (ba20bc7d580b4d7fa87a7b2fcc0c51d9). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1527.937768] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccce3694-a42d-4423-b5c6-cfa06ecbe9cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.950026] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Created folder: Project (ba20bc7d580b4d7fa87a7b2fcc0c51d9) in parent group-v352575. [ 1527.950159] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Creating folder: Instances. Parent ref: group-v352623. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1527.950402] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf7009d1-02a6-4289-84ec-1dadc6091780 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.958896] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Created folder: Instances in parent group-v352623. [ 1527.959201] env[61663]: DEBUG oslo.service.loopingcall [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.959429] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1527.959668] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6c06aa5-0e2f-4027-ad16-3cffaab1f79c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.982181] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1527.982181] env[61663]: value = "task-1690733" [ 1527.982181] env[61663]: _type = "Task" [ 1527.982181] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.989976] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690733, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.493907] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690733, 'name': CreateVM_Task, 'duration_secs': 0.298509} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.494175] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1528.494874] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.495143] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.495468] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1528.495712] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f387d219-e446-4acc-ba77-d80b21a5e8da {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.500911] env[61663]: DEBUG oslo_vmware.api [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for the task: (returnval){ [ 1528.500911] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52918f74-13ca-5a3e-34cb-8aa2f014423c" [ 1528.500911] env[61663]: _type = "Task" [ 1528.500911] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.511408] env[61663]: DEBUG oslo_vmware.api [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52918f74-13ca-5a3e-34cb-8aa2f014423c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.701311] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1528.701918] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1528.701918] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1528.730490] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.731349] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.731349] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.731349] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.731349] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.731349] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.731725] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.731725] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 04488672-86c4-415b-961e-94641d570112] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.731725] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.731977] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1528.732079] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1528.732576] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.015094] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.015355] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1529.015566] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.370068] env[61663]: DEBUG nova.compute.manager [req-4f37efe9-245c-46b9-99cb-7eb622d644fa req-0a924df1-1c15-44d6-81f8-9ab95cc2de7b service nova] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Received event network-changed-7de7142b-3745-4c3f-9b95-78eb2feb1252 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1529.370327] env[61663]: DEBUG nova.compute.manager [req-4f37efe9-245c-46b9-99cb-7eb622d644fa req-0a924df1-1c15-44d6-81f8-9ab95cc2de7b service nova] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Refreshing instance network info cache due to event network-changed-7de7142b-3745-4c3f-9b95-78eb2feb1252. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1529.372864] env[61663]: DEBUG oslo_concurrency.lockutils [req-4f37efe9-245c-46b9-99cb-7eb622d644fa req-0a924df1-1c15-44d6-81f8-9ab95cc2de7b service nova] Acquiring lock "refresh_cache-1305216b-0ee5-499a-a82a-30b45a8c832c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.372864] env[61663]: DEBUG oslo_concurrency.lockutils [req-4f37efe9-245c-46b9-99cb-7eb622d644fa req-0a924df1-1c15-44d6-81f8-9ab95cc2de7b service nova] Acquired lock "refresh_cache-1305216b-0ee5-499a-a82a-30b45a8c832c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.372864] env[61663]: DEBUG nova.network.neutron [req-4f37efe9-245c-46b9-99cb-7eb622d644fa req-0a924df1-1c15-44d6-81f8-9ab95cc2de7b service nova] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Refreshing network info cache for port 7de7142b-3745-4c3f-9b95-78eb2feb1252 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1529.694608] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1529.710329] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.710559] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.710725] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.710881] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1529.712157] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7401a54-d4bd-4818-9579-124650d500ba {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.723606] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb372f05-426d-45c8-aa8a-8f44329535b3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.739094] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f05561-5c76-4eb5-9bd6-59b9091e8ac1 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.747180] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528225bf-7d9d-4ef9-ba0a-bd5f478c7363 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.780917] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181311MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1529.781099] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.781316] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.899495] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 71509f58-5616-4d6a-9a88-3bfd9d414a0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.899652] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 790791ee-4e6c-4116-8ade-ba61f55ebd4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.899782] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f9a675b6-e76d-492b-ac34-3c7b10553fca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.899907] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.900038] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.900161] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.900285] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.900401] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.900515] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.900632] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1529.903662] env[61663]: DEBUG nova.network.neutron [req-4f37efe9-245c-46b9-99cb-7eb622d644fa req-0a924df1-1c15-44d6-81f8-9ab95cc2de7b service nova] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Updated VIF entry in instance network info cache for port 7de7142b-3745-4c3f-9b95-78eb2feb1252. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1529.904205] env[61663]: DEBUG nova.network.neutron [req-4f37efe9-245c-46b9-99cb-7eb622d644fa req-0a924df1-1c15-44d6-81f8-9ab95cc2de7b service nova] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Updating instance_info_cache with network_info: [{"id": "7de7142b-3745-4c3f-9b95-78eb2feb1252", "address": "fa:16:3e:4d:14:a0", "network": {"id": "4881720d-78ec-4fa5-ac05-915e1d68e0e0", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-829948714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba20bc7d580b4d7fa87a7b2fcc0c51d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7de7142b-37", "ovs_interfaceid": "7de7142b-3745-4c3f-9b95-78eb2feb1252", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.915344] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.919770] env[61663]: DEBUG oslo_concurrency.lockutils [req-4f37efe9-245c-46b9-99cb-7eb622d644fa req-0a924df1-1c15-44d6-81f8-9ab95cc2de7b service nova] Releasing lock "refresh_cache-1305216b-0ee5-499a-a82a-30b45a8c832c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.931571] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.946834] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 87c4b17f-9890-44fe-9974-0f6c45e316d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.962258] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 39380f25-15a5-4d8f-b38b-39e1b3561314 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.974960] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.987672] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 536c3f6e-757d-4b59-bf82-c01f735746d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.998422] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 058b6485-898e-4799-899a-df5297144271 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.012068] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f2a87111-4361-4e0a-940c-3c163c2d5e72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.028436] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 26e22311-811c-49cf-b2df-40822f2e4f3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.040920] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f71fce22-f27b-4e5d-94e4-697d09377ed1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.054795] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 06030fd6-0e35-42dc-bd66-cfc95930e90a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.067581] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 65cdd238-4875-4dad-9df0-0aeda65ab9ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.083027] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f3d817b9-1a93-4fb8-b25c-756de9152f17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.094365] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5c76183a-cdcb-49e7-95b9-75a635352479 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.109042] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 14682ec1-2d3f-4601-a48e-832e7f2072d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1530.109042] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1530.109042] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1530.488561] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67299a3e-0b41-4ae5-9659-5ab7ab0dd267 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.496629] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33bd3e9-40a5-4423-b609-9b2228cab1a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.529007] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29565a9f-2027-4885-b53e-1ed53e802d12 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.536454] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b3a039-f00a-4de8-9d58-0afbf0b2b8f1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.549727] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.562678] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1530.586484] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1530.586694] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.805s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.585241] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1531.687804] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.338125] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquiring lock "94f7665c-5247-4474-a9ea-700f1778af81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.338125] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "94f7665c-5247-4474-a9ea-700f1778af81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.692040] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1534.692207] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.612556] env[61663]: DEBUG oslo_concurrency.lockutils [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "1305216b-0ee5-499a-a82a-30b45a8c832c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.688301] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1537.711118] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1537.711470] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1537.711470] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval 
<= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1569.175055] env[61663]: WARNING oslo_vmware.rw_handles [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1569.175055] env[61663]: ERROR oslo_vmware.rw_handles [ 1569.175055] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/557c103b-b29e-4b9e-af39-fb62929b9d10/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1569.177655] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1569.177655] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Copying Virtual Disk [datastore1] vmware_temp/557c103b-b29e-4b9e-af39-fb62929b9d10/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/557c103b-b29e-4b9e-af39-fb62929b9d10/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1569.177655] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb301198-d14a-42c8-bc8e-4cd021a43891 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.186385] env[61663]: DEBUG oslo_vmware.api [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Waiting 
for the task: (returnval){ [ 1569.186385] env[61663]: value = "task-1690734" [ 1569.186385] env[61663]: _type = "Task" [ 1569.186385] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.194390] env[61663]: DEBUG oslo_vmware.api [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Task: {'id': task-1690734, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.697198] env[61663]: DEBUG oslo_vmware.exceptions [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1569.697433] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.697976] env[61663]: ERROR nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1569.697976] env[61663]: Faults: ['InvalidArgument'] [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Traceback (most recent call last): [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] yield resources [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] self.driver.spawn(context, instance, image_meta, [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] self._fetch_image_if_missing(context, vi) [ 1569.697976] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 
71509f58-5616-4d6a-9a88-3bfd9d414a0c] image_cache(vi, tmp_image_ds_loc) [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] vm_util.copy_virtual_disk( [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] session._wait_for_task(vmdk_copy_task) [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] return self.wait_for_task(task_ref) [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] return evt.wait() [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] result = hub.switch() [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1569.698512] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] return self.greenlet.switch() [ 1569.698939] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1569.698939] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] self.f(*self.args, **self.kw) [ 1569.698939] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1569.698939] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] raise exceptions.translate_fault(task_info.error) [ 1569.698939] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1569.698939] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Faults: ['InvalidArgument'] [ 1569.698939] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] [ 1569.698939] env[61663]: INFO nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Terminating instance [ 1569.699936] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc 
tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.700154] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1569.700418] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-598338d0-68d2-40b9-a868-842f217c9949 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.702881] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1569.703028] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1569.703747] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e57255-b716-4cfd-a179-88f39aef9816 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.710416] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1569.710637] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-263dffa0-451a-48a7-9c14-ce53c2ac69b8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.712905] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1569.713092] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1569.714036] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cfa0771-ca0e-4dbe-970f-a83876c73056 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.719585] env[61663]: DEBUG oslo_vmware.api [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for the task: (returnval){ [ 1569.719585] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52aa796e-f57c-ce64-9888-57b77c46d3c4" [ 1569.719585] env[61663]: _type = "Task" [ 1569.719585] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.726252] env[61663]: DEBUG oslo_vmware.api [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52aa796e-f57c-ce64-9888-57b77c46d3c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.790687] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1569.790927] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1569.791132] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Deleting the datastore file [datastore1] 71509f58-5616-4d6a-9a88-3bfd9d414a0c {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1569.791419] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0612dfb-761f-4db9-9f2c-e29c366feee0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.797551] env[61663]: DEBUG oslo_vmware.api [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Waiting for the task: (returnval){ [ 1569.797551] env[61663]: value = "task-1690736" [ 1569.797551] env[61663]: _type = "Task" [ 1569.797551] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.806240] env[61663]: DEBUG oslo_vmware.api [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Task: {'id': task-1690736, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.231568] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1570.231933] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Creating directory with path [datastore1] vmware_temp/b1e7f8f9-ef2e-4778-b981-6492c8daaf60/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1570.231978] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89eb9de2-6612-4f2a-8478-e2500bcf9880 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.243286] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Created directory with path [datastore1] vmware_temp/b1e7f8f9-ef2e-4778-b981-6492c8daaf60/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1570.243502] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Fetch image to [datastore1] vmware_temp/b1e7f8f9-ef2e-4778-b981-6492c8daaf60/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1570.243691] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/b1e7f8f9-ef2e-4778-b981-6492c8daaf60/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1570.244421] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b46878-da26-4b46-b6be-8704ca20d084 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.251251] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7728f4b6-3d03-46ac-9abc-35dd442008df {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.260508] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8e6bf5-5ee9-4fb3-862d-fbafbc15a223 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.290738] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e92b393-608b-44a1-a14e-d589876dd3e5 {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.296521] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3d3a1d91-7c2e-4efc-84b7-e2163c3deaa6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.305403] env[61663]: DEBUG oslo_vmware.api [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Task: {'id': task-1690736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067097} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.305636] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1570.305833] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1570.306087] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1570.306276] env[61663]: INFO nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Took 0.60 seconds to destroy the instance on the hypervisor. 
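The SearchDatastore_Task and DeleteDatastoreFile_Task entries above show oslo.vmware's wait_for_task cycle: a vSphere task is submitted, then _poll_task reports progress ("progress is 0%.") until the task reaches a terminal state, either "completed successfully" with a duration_secs, or a fault that gets translated and raised. A minimal sketch of that polling pattern, for orientation only; get_task_info is a hypothetical stand-in for the PropertyCollector read oslo.vmware actually performs, not its real API.

```python
# Illustrative sketch only -- not the oslo.vmware implementation. It mimics
# the poll-until-done behaviour visible in the log, where each DEBUG
# "_poll_task" line reports progress until the task finishes or faults.
import time

def wait_for_task(get_task_info, task_ref, interval=0.5):
    """Poll a vSphere-style task until it succeeds or errors."""
    while True:
        info = get_task_info(task_ref)      # hypothetical: reads Task.info
        if info.state == "success":
            return info.result              # log then prints duration_secs
        if info.state == "error":
            raise RuntimeError(info.error)  # oslo.vmware raises a translated fault here
        time.sleep(interval)                # between polls the log shows "progress is 0%"
```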
[ 1570.308579] env[61663]: DEBUG nova.compute.claims [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1570.308665] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.308885] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.318916] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1570.371405] env[61663]: DEBUG oslo_vmware.rw_handles [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b1e7f8f9-ef2e-4778-b981-6492c8daaf60/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1570.438694] env[61663]: DEBUG oslo_vmware.rw_handles [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1570.438694] env[61663]: DEBUG oslo_vmware.rw_handles [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b1e7f8f9-ef2e-4778-b981-6492c8daaf60/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1570.713065] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00d975c-5444-4a4b-ad35-5e1de26681a9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.721131] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d7318b6-f7b5-4049-a95c-d352cdfa8d39 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.752683] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20b80b5-9ba7-456a-a573-63f0b736ca73 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.759865] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad98b64b-885c-4705-bc4c-54f115df0331 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.772962] env[61663]: DEBUG nova.compute.provider_tree [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1570.782086] env[61663]: DEBUG nova.scheduler.client.report [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1570.798821] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.490s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.799438] env[61663]: ERROR nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1570.799438] env[61663]: Faults: ['InvalidArgument'] [ 1570.799438] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Traceback (most recent call last): [ 1570.799438] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1570.799438] env[61663]: ERROR 
nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] self.driver.spawn(context, instance, image_meta, [ 1570.799438] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1570.799438] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1570.799438] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1570.799438] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] self._fetch_image_if_missing(context, vi) [ 1570.799438] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1570.799438] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] image_cache(vi, tmp_image_ds_loc) [ 1570.799438] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] vm_util.copy_virtual_disk( [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] session._wait_for_task(vmdk_copy_task) [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] return self.wait_for_task(task_ref) [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] return evt.wait() [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] result = hub.switch() [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] return self.greenlet.switch() [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1570.799851] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] self.f(*self.args, **self.kw) [ 1570.800241] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1570.800241] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] raise exceptions.translate_fault(task_info.error) [ 1570.800241] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1570.800241] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Faults: ['InvalidArgument'] [ 1570.800241] env[61663]: ERROR nova.compute.manager [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] [ 1570.800241] env[61663]: DEBUG nova.compute.utils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1570.801890] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Build of instance 71509f58-5616-4d6a-9a88-3bfd9d414a0c was re-scheduled: A specified parameter was not correct: fileType [ 1570.801890] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1570.802279] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1570.802444] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1570.802620] env[61663]: DEBUG nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1570.802786] env[61663]: DEBUG nova.network.neutron [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1571.263937] env[61663]: DEBUG nova.network.neutron [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.276015] env[61663]: INFO nova.compute.manager [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Took 0.47 seconds to deallocate network for instance. [ 1571.385441] env[61663]: INFO nova.scheduler.client.report [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Deleted allocations for instance 71509f58-5616-4d6a-9a88-3bfd9d414a0c [ 1571.405599] env[61663]: DEBUG oslo_concurrency.lockutils [None req-abdd8bae-6188-4ca5-a9af-f1e4bf73b68b tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 327.511s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.406774] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 128.793s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.406950] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Acquiring lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.407173] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.407364] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.409614] env[61663]: INFO nova.compute.manager [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Terminating instance [ 1571.415023] env[61663]: DEBUG nova.compute.manager [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1571.415023] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1571.415689] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01dedf3e-9a3c-4191-bcb1-8f044dc79989 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.428142] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0b1f36-a8d1-4581-a2ce-8768bfdd3ed2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.439927] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1571.463180] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 71509f58-5616-4d6a-9a88-3bfd9d414a0c could not be found. [ 1571.463180] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1571.463180] env[61663]: INFO nova.compute.manager [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Took 0.05 seconds to destroy the instance on the hypervisor. 
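Nearly every state transition above is serialized through oslo.concurrency named locks: the Acquiring/acquired/released triplets, with their "waited" and "held" timings, come from lockutils' inner wrapper. A minimal sketch of the pattern, assuming an in-process lock; the decorator and context-manager forms are real oslo.concurrency APIs, while the function bodies and names are illustrative only.

```python
from oslo_concurrency import lockutils

# Decorator form: the wrapped call runs with the named lock held, and
# lockutils emits the "acquired ... waited" / "released ... held" log lines.
@lockutils.synchronized("compute_resources")
def abort_instance_claim():
    pass  # illustrative body; real code adjusts the resource tracker

# Context-manager form, matching the per-instance "<uuid>-events" locks above.
def clear_events(instance_uuid):
    with lockutils.lock(f"{instance_uuid}-events"):
        pass  # illustrative: pop/clear pending events while serialized
```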
[ 1571.463180] env[61663]: DEBUG oslo.service.loopingcall [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1571.463180] env[61663]: DEBUG nova.compute.manager [-] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1571.463389] env[61663]: DEBUG nova.network.neutron [-] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1571.493799] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.494062] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.495589] env[61663]: INFO nova.compute.claims [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1571.499396] env[61663]: DEBUG nova.network.neutron [-] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.507836] env[61663]: INFO nova.compute.manager [-] [instance: 71509f58-5616-4d6a-9a88-3bfd9d414a0c] Took 0.04 seconds to deallocate network for instance. 
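The "Claim successful" line rests on the placement inventory the report client logs at [ 1570.782086] and again at [ 1571.919997]. For each resource class the schedulable capacity is (total - reserved) * allocation_ratio; a small illustrative calculation over the values reported in this log (dict trimmed to the fields the formula uses):

```python
# Data taken from the inventory lines in this log; trimmed for brevity.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def effective_capacity(inv):
    """Capacity Placement can allocate per resource class."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

# VCPU -> (48 - 0) * 4.0 = 192.0 schedulable vCPUs on this node
print(effective_capacity(inventory))
```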
[ 1571.606262] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e3c2b17e-970a-4257-abb9-3b2d97c883da tempest-ServerActionsTestJSON-1880419134 tempest-ServerActionsTestJSON-1880419134-project-member] Lock "71509f58-5616-4d6a-9a88-3bfd9d414a0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.199s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.853205] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7c106e-dec0-4fe4-9c47-4be837855d79 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.860738] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1dbed3-b9ab-4756-bc2c-12530bf4ddde {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.891156] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8a831e-15aa-4feb-8f76-92f9f2ea04da {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.898203] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6394945-44e4-479f-b4bc-49758d1b75b5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.911114] env[61663]: DEBUG nova.compute.provider_tree [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1571.919997] env[61663]: DEBUG nova.scheduler.client.report [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1571.936448] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.442s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.936931] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1571.974630] env[61663]: DEBUG nova.compute.utils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1571.975849] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1571.976362] env[61663]: DEBUG nova.network.neutron [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1571.985971] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1572.052373] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1572.079916] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1572.080174] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1572.080353] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1572.080575] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1572.080728] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1572.080878] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1572.081097] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1572.081263] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1572.081432] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1572.081613] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1572.081769] env[61663]: DEBUG nova.virt.hardware [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1572.082727] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6aa2c46-5085-42b0-9cc8-0eb8fb0fa8ce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.086421] env[61663]: DEBUG nova.policy [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4cb95957e7c24c28857753b894e3fae9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9945c1e1dda04f3eb84b3e34072ec0be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1572.093160] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da13d27d-c47a-4057-ba6b-cb4707a60d5f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.622733] env[61663]: DEBUG nova.network.neutron [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Successfully created port: 884ec50a-a9f5-4eb6-b21c-03d4cadf988c {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1574.248881] env[61663]: DEBUG nova.network.neutron [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Successfully updated port: 884ec50a-a9f5-4eb6-b21c-03d4cadf988c {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1574.268159] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquiring lock "refresh_cache-47e1551e-ac80-4b4e-b568-3931c6dcf3b3" 
{{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.268318] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquired lock "refresh_cache-47e1551e-ac80-4b4e-b568-3931c6dcf3b3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.268473] env[61663]: DEBUG nova.network.neutron [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1574.334086] env[61663]: DEBUG nova.network.neutron [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1574.651139] env[61663]: DEBUG nova.network.neutron [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Updating instance_info_cache with network_info: [{"id": "884ec50a-a9f5-4eb6-b21c-03d4cadf988c", "address": "fa:16:3e:06:28:78", "network": {"id": "729ac288-d65a-4e40-832d-a5e257f6c907", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1730267512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9945c1e1dda04f3eb84b3e34072ec0be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c09cc47-a7d0-4816-bee4-69cc9f2e04b0", "external-id": "nsx-vlan-transportzone-687", "segmentation_id": 687, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap884ec50a-a9", "ovs_interfaceid": "884ec50a-a9f5-4eb6-b21c-03d4cadf988c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.676370] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Releasing lock "refresh_cache-47e1551e-ac80-4b4e-b568-3931c6dcf3b3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.678710] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Instance network_info: 
|[{"id": "884ec50a-a9f5-4eb6-b21c-03d4cadf988c", "address": "fa:16:3e:06:28:78", "network": {"id": "729ac288-d65a-4e40-832d-a5e257f6c907", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1730267512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9945c1e1dda04f3eb84b3e34072ec0be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c09cc47-a7d0-4816-bee4-69cc9f2e04b0", "external-id": "nsx-vlan-transportzone-687", "segmentation_id": 687, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap884ec50a-a9", "ovs_interfaceid": "884ec50a-a9f5-4eb6-b21c-03d4cadf988c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1574.678955] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:28:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c09cc47-a7d0-4816-bee4-69cc9f2e04b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '884ec50a-a9f5-4eb6-b21c-03d4cadf988c', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1574.685061] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Creating folder: Project (9945c1e1dda04f3eb84b3e34072ec0be). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1574.686207] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6a3dc63-c6ac-4fda-8435-9d8e54291445 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.700135] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Created folder: Project (9945c1e1dda04f3eb84b3e34072ec0be) in parent group-v352575. [ 1574.700353] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Creating folder: Instances. Parent ref: group-v352626. 
{{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1574.700602] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-edcc7b3d-7df1-4827-86b9-a9fa86510016 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.706137] env[61663]: DEBUG nova.compute.manager [req-481a0b1b-809c-46f8-9111-13732b484a07 req-3d81e215-914d-4724-8d07-9ca87be3c447 service nova] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Received event network-vif-plugged-884ec50a-a9f5-4eb6-b21c-03d4cadf988c {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1574.706137] env[61663]: DEBUG oslo_concurrency.lockutils [req-481a0b1b-809c-46f8-9111-13732b484a07 req-3d81e215-914d-4724-8d07-9ca87be3c447 service nova] Acquiring lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.706352] env[61663]: DEBUG oslo_concurrency.lockutils [req-481a0b1b-809c-46f8-9111-13732b484a07 req-3d81e215-914d-4724-8d07-9ca87be3c447 service nova] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.706608] env[61663]: DEBUG oslo_concurrency.lockutils [req-481a0b1b-809c-46f8-9111-13732b484a07 req-3d81e215-914d-4724-8d07-9ca87be3c447 service nova] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.706608] env[61663]: DEBUG nova.compute.manager [req-481a0b1b-809c-46f8-9111-13732b484a07 req-3d81e215-914d-4724-8d07-9ca87be3c447 service nova] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] No waiting events found dispatching network-vif-plugged-884ec50a-a9f5-4eb6-b21c-03d4cadf988c {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1574.706758] env[61663]: WARNING nova.compute.manager [req-481a0b1b-809c-46f8-9111-13732b484a07 req-3d81e215-914d-4724-8d07-9ca87be3c447 service nova] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Received unexpected event network-vif-plugged-884ec50a-a9f5-4eb6-b21c-03d4cadf988c for instance with vm_state building and task_state spawning. [ 1574.711220] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Created folder: Instances in parent group-v352626. [ 1574.711220] env[61663]: DEBUG oslo.service.loopingcall [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1574.711220] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1574.711220] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7557828-85a6-4979-b89b-0a6f5a107c03 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.730313] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1574.730313] env[61663]: value = "task-1690739" [ 1574.730313] env[61663]: _type = "Task" [ 1574.730313] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.740702] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690739, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.241600] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690739, 'name': CreateVM_Task, 'duration_secs': 0.308026} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.241993] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1575.242849] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.243312] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.243755] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1575.244132] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0b61a4b-15b0-430b-b7e2-2bc1ab727013 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.251021] env[61663]: DEBUG oslo_vmware.api [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Waiting for the task: (returnval){ [ 1575.251021] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524cb5f9-1a35-170d-6482-340a11ad3a31" [ 1575.251021] env[61663]: _type = "Task" [ 
1575.251021] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.268380] env[61663]: DEBUG oslo_vmware.api [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524cb5f9-1a35-170d-6482-340a11ad3a31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.762034] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.762523] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1575.764139] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.808378] env[61663]: DEBUG nova.compute.manager [req-89b4e6be-3d47-4a33-83de-e1c327a67ea8 req-38a8ecc1-b61e-401c-a6d2-efd3469ea1ba service nova] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Received event network-changed-884ec50a-a9f5-4eb6-b21c-03d4cadf988c {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1576.808630] env[61663]: DEBUG nova.compute.manager [req-89b4e6be-3d47-4a33-83de-e1c327a67ea8 req-38a8ecc1-b61e-401c-a6d2-efd3469ea1ba service nova] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Refreshing instance network info cache due to event network-changed-884ec50a-a9f5-4eb6-b21c-03d4cadf988c. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1576.808791] env[61663]: DEBUG oslo_concurrency.lockutils [req-89b4e6be-3d47-4a33-83de-e1c327a67ea8 req-38a8ecc1-b61e-401c-a6d2-efd3469ea1ba service nova] Acquiring lock "refresh_cache-47e1551e-ac80-4b4e-b568-3931c6dcf3b3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.808925] env[61663]: DEBUG oslo_concurrency.lockutils [req-89b4e6be-3d47-4a33-83de-e1c327a67ea8 req-38a8ecc1-b61e-401c-a6d2-efd3469ea1ba service nova] Acquired lock "refresh_cache-47e1551e-ac80-4b4e-b568-3931c6dcf3b3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.809100] env[61663]: DEBUG nova.network.neutron [req-89b4e6be-3d47-4a33-83de-e1c327a67ea8 req-38a8ecc1-b61e-401c-a6d2-efd3469ea1ba service nova] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Refreshing network info cache for port 884ec50a-a9f5-4eb6-b21c-03d4cadf988c {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1577.220321] env[61663]: DEBUG nova.network.neutron [req-89b4e6be-3d47-4a33-83de-e1c327a67ea8 req-38a8ecc1-b61e-401c-a6d2-efd3469ea1ba service nova] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Updated VIF entry in instance network info cache for port 884ec50a-a9f5-4eb6-b21c-03d4cadf988c. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1577.220743] env[61663]: DEBUG nova.network.neutron [req-89b4e6be-3d47-4a33-83de-e1c327a67ea8 req-38a8ecc1-b61e-401c-a6d2-efd3469ea1ba service nova] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Updating instance_info_cache with network_info: [{"id": "884ec50a-a9f5-4eb6-b21c-03d4cadf988c", "address": "fa:16:3e:06:28:78", "network": {"id": "729ac288-d65a-4e40-832d-a5e257f6c907", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1730267512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9945c1e1dda04f3eb84b3e34072ec0be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c09cc47-a7d0-4816-bee4-69cc9f2e04b0", "external-id": "nsx-vlan-transportzone-687", "segmentation_id": 687, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap884ec50a-a9", "ovs_interfaceid": "884ec50a-a9f5-4eb6-b21c-03d4cadf988c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.236228] env[61663]: DEBUG oslo_concurrency.lockutils [req-89b4e6be-3d47-4a33-83de-e1c327a67ea8 req-38a8ecc1-b61e-401c-a6d2-efd3469ea1ba service nova] Releasing lock "refresh_cache-47e1551e-ac80-4b4e-b568-3931c6dcf3b3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.338065] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 
tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquiring lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.140023] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquiring lock "768bef02-a114-4cac-a614-6e8a04ce0d18" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.140370] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.692799] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1588.693174] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1588.693355] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1588.716161] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.716344] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.716480] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.716650] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.716730] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.716850] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.716976] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 04488672-86c4-415b-961e-94641d570112] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.717114] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.717241] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.717359] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1588.717480] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1588.717998] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.692210] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.692517] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.692631] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.705328] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.705556] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.705722] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.705891] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1591.707033] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7c2fb8-a2da-432a-aa1a-771bdea125e0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.715977] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0392c13b-3e36-4c5c-ba4c-c1ffa8cce52b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.729736] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad01432-d152-4e24-99a3-cc109e3acef1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.736277] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03b1942-65b4-4fd8-b04e-80558b64a79e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.766406] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181339MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1591.766579] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.766750] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.839777] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 790791ee-4e6c-4116-8ade-ba61f55ebd4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.839966] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f9a675b6-e76d-492b-ac34-3c7b10553fca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.840121] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.840256] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.840381] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.840496] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.840618] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.840737] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.840853] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.841015] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1591.852928] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.864176] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 87c4b17f-9890-44fe-9974-0f6c45e316d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.874861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 39380f25-15a5-4d8f-b38b-39e1b3561314 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.886028] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.896595] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 536c3f6e-757d-4b59-bf82-c01f735746d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.906525] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 058b6485-898e-4799-899a-df5297144271 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.917140] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f2a87111-4361-4e0a-940c-3c163c2d5e72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.928116] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 26e22311-811c-49cf-b2df-40822f2e4f3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.939493] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f71fce22-f27b-4e5d-94e4-697d09377ed1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.950828] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 06030fd6-0e35-42dc-bd66-cfc95930e90a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.963313] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 65cdd238-4875-4dad-9df0-0aeda65ab9ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.976014] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f3d817b9-1a93-4fb8-b25c-756de9152f17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.987846] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5c76183a-cdcb-49e7-95b9-75a635352479 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1591.997391] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 14682ec1-2d3f-4601-a48e-832e7f2072d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1592.007298] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1592.017549] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1592.017790] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1592.018169] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1592.330521] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484b7498-3b3b-4e27-b624-cc971ac1cf47 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.339333] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1234723-e034-481c-bea7-d91a1b2d7a62 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.370339] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b879b1-97c6-4113-8f41-efc473a58e61 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.377354] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2566639-1246-4f21-9dab-84c9cebd57b1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.390027] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1592.398366] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1592.412837] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1592.413041] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.646s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.413317] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1595.692359] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1598.692015] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1598.692344] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1599.692466] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1619.195345] env[61663]: WARNING oslo_vmware.rw_handles [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1619.195345] env[61663]: ERROR oslo_vmware.rw_handles [ 1619.196108] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/b1e7f8f9-ef2e-4778-b981-6492c8daaf60/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1619.197921] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 
tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1619.198196] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Copying Virtual Disk [datastore1] vmware_temp/b1e7f8f9-ef2e-4778-b981-6492c8daaf60/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/b1e7f8f9-ef2e-4778-b981-6492c8daaf60/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1619.198497] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01a9c182-47e2-4a34-b749-3fb8870ef655 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.207134] env[61663]: DEBUG oslo_vmware.api [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for the task: (returnval){ [ 1619.207134] env[61663]: value = "task-1690740" [ 1619.207134] env[61663]: _type = "Task" [ 1619.207134] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.215946] env[61663]: DEBUG oslo_vmware.api [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': task-1690740, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.717779] env[61663]: DEBUG oslo_vmware.exceptions [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1619.718217] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.718820] env[61663]: ERROR nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1619.718820] env[61663]: Faults: ['InvalidArgument'] [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Traceback (most recent call last): [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] yield resources [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] self.driver.spawn(context, instance, image_meta, [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] self._fetch_image_if_missing(context, vi) [ 1619.718820] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] image_cache(vi, tmp_image_ds_loc) [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] vm_util.copy_virtual_disk( [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] session._wait_for_task(vmdk_copy_task) [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] return self.wait_for_task(task_ref) [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] return evt.wait() [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] result = hub.switch() [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1619.719223] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] return self.greenlet.switch() [ 1619.719586] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1619.719586] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] self.f(*self.args, **self.kw) [ 1619.719586] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1619.719586] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] raise exceptions.translate_fault(task_info.error) [ 1619.719586] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1619.719586] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Faults: ['InvalidArgument'] [ 1619.719586] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] [ 1619.719586] env[61663]: INFO nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Terminating instance [ 1619.721193] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.721474] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1619.721761] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba39781b-2f94-4dd0-81dd-8819e9c28932 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.724113] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1619.724457] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1619.725285] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3249248b-3655-4740-a43e-1d954ff4c331 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.733632] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1619.733937] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-974dfb5a-a2b7-459c-bca7-c24ad33afa28 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.736141] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1619.736374] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1619.737327] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87c9007f-2199-479c-95f4-8a57467eba93 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.742219] env[61663]: DEBUG oslo_vmware.api [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Waiting for the task: (returnval){ [ 1619.742219] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528d2817-e7b9-0df1-5d97-b9f845c73f0b" [ 1619.742219] env[61663]: _type = "Task" [ 1619.742219] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.749648] env[61663]: DEBUG oslo_vmware.api [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528d2817-e7b9-0df1-5d97-b9f845c73f0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.832381] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1619.832604] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1619.832801] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Deleting the datastore file [datastore1] 790791ee-4e6c-4116-8ade-ba61f55ebd4d {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1619.833080] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1844520-099b-4583-94e7-eea99e059c48 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.839740] env[61663]: DEBUG oslo_vmware.api [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for the task: (returnval){ [ 1619.839740] env[61663]: value = "task-1690742" [ 1619.839740] env[61663]: _type = "Task" [ 1619.839740] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.847050] env[61663]: DEBUG oslo_vmware.api [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': task-1690742, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.253738] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1620.254135] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Creating directory with path [datastore1] vmware_temp/b0555d28-d63e-4232-99ad-c590ac916bd9/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1620.254135] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a23618d-572c-45dc-8032-274ca827d19f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.267478] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Created directory with path [datastore1] vmware_temp/b0555d28-d63e-4232-99ad-c590ac916bd9/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1620.267478] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Fetch image to [datastore1] vmware_temp/b0555d28-d63e-4232-99ad-c590ac916bd9/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1620.267636] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/b0555d28-d63e-4232-99ad-c590ac916bd9/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1620.268500] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbd1300-a941-4079-9b40-af42fda7698b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.276019] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989aa7ee-76a4-49c5-8e65-924ff58b2545 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.285987] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a9d94b-a6d7-4328-9b5e-0fd019f93868 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.317046] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5b94e8b2-0f04-4e72-a6ec-2cb2665efbeb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.323251] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cfaf7e14-e057-4b93-99cd-55620b893048 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.345204] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1620.351715] env[61663]: DEBUG oslo_vmware.api [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': task-1690742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085588} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.351953] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1620.352161] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1620.352337] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1620.352513] env[61663]: INFO nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Took 0.63 seconds to destroy the instance on the hypervisor. 
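
[editor's note] Every vSphere operation in this trace follows the same asynchronous pattern: the SOAP call (CopyVirtualDisk_Task and DeleteDatastoreFile_Task above) returns a task reference immediately, and oslo.vmware then polls the task's state from a looping call, producing the "Waiting for the task ... to complete", "progress is 0%", and "completed successfully" records seen here. The sketch below is a minimal, simplified stand-in for that polling loop, not the actual oslo_vmware.api.VMwareAPISession.wait_for_task implementation; in particular, get_task_info is a hypothetical helper standing in for the PropertyCollector read the real library performs on each tick.

    import time

    def wait_for_task(session, task_ref, poll_interval=0.5):
        # Simplified polling loop. The real code runs inside an
        # oslo_vmware loopingcall (see the _poll_task frames in the
        # tracebacks above) rather than a bare while/sleep.
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # oslo.vmware translates the fault into an exception here;
                # this is where "A specified parameter was not correct:
                # fileType / Faults: ['InvalidArgument']" surfaces in this log.
                raise RuntimeError(info.error.localizedMessage)
            # Still 'queued' or 'running': report progress and retry,
            # matching the "progress is 0%" lines above.
            print("Task: %s progress is %s%%" % (info.key, info.progress or 0))
            time.sleep(poll_interval)
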
[ 1620.356781] env[61663]: DEBUG nova.compute.claims [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1620.356956] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.357190] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.400132] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b0555d28-d63e-4232-99ad-c590ac916bd9/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1620.464777] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1620.465422] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b0555d28-d63e-4232-99ad-c590ac916bd9/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1620.729445] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bf4aed-a600-4025-857a-f50072d2e519 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.738112] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02eb8dea-9176-4ce2-971e-7ecf05df16b3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.766632] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1da4e45-d5c8-4336-ad0d-d369944aac63 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.773405] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d1e312-f940-4c02-975b-2de7007fc24b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.785759] env[61663]: DEBUG nova.compute.provider_tree [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1620.794618] env[61663]: DEBUG nova.scheduler.client.report [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1620.807955] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.451s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.808502] env[61663]: ERROR nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1620.808502] env[61663]: Faults: ['InvalidArgument'] [ 1620.808502] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Traceback (most recent call last): [ 1620.808502] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1620.808502] env[61663]: ERROR 
nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] self.driver.spawn(context, instance, image_meta, [ 1620.808502] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1620.808502] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1620.808502] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1620.808502] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] self._fetch_image_if_missing(context, vi) [ 1620.808502] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1620.808502] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] image_cache(vi, tmp_image_ds_loc) [ 1620.808502] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] vm_util.copy_virtual_disk( [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] session._wait_for_task(vmdk_copy_task) [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] return self.wait_for_task(task_ref) [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] return evt.wait() [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] result = hub.switch() [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] return self.greenlet.switch() [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1620.808910] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] self.f(*self.args, **self.kw) [ 1620.809188] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1620.809188] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] raise exceptions.translate_fault(task_info.error) [ 1620.809188] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1620.809188] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Faults: ['InvalidArgument'] [ 1620.809188] env[61663]: ERROR nova.compute.manager [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] [ 1620.809314] env[61663]: DEBUG nova.compute.utils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1620.810629] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Build of instance 790791ee-4e6c-4116-8ade-ba61f55ebd4d was re-scheduled: A specified parameter was not correct: fileType [ 1620.810629] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1620.811051] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1620.811231] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1620.811503] env[61663]: DEBUG nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1620.811556] env[61663]: DEBUG nova.network.neutron [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1621.320343] env[61663]: DEBUG nova.network.neutron [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.330461] env[61663]: INFO nova.compute.manager [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 790791ee-4e6c-4116-8ade-ba61f55ebd4d] Took 0.52 seconds to deallocate network for instance. [ 1621.438478] env[61663]: INFO nova.scheduler.client.report [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Deleted allocations for instance 790791ee-4e6c-4116-8ade-ba61f55ebd4d [ 1621.463442] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3ddae3bf-ee9b-4072-a825-b9c1e0d93ebc tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "790791ee-4e6c-4116-8ade-ba61f55ebd4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 374.670s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.475668] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1621.535754] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.536092] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.538897] env[61663]: INFO nova.compute.claims [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1621.887565] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b12c726-df67-4990-ae85-4d85f8750d11 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.895430] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba00ab0-0579-4790-8c3b-896bfa6c850a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.924924] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5172eaa-b1c0-4857-9596-ddd3fc99f7cd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.932117] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30b2ee0-b331-4998-9f7d-3ac31e7b5063 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.945346] env[61663]: DEBUG nova.compute.provider_tree [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1621.953971] env[61663]: DEBUG nova.scheduler.client.report [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1621.968531] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.432s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.969077] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1622.005395] env[61663]: DEBUG nova.compute.utils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1622.006705] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1622.006929] env[61663]: DEBUG nova.network.neutron [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1622.015757] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1622.078603] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1622.105508] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1622.105841] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1622.105914] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1622.106112] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1622.106267] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1622.106415] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1622.106623] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1622.106784] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1622.106947] env[61663]: DEBUG nova.virt.hardware [None 
req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1622.107149] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1622.107343] env[61663]: DEBUG nova.virt.hardware [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1622.108331] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178139ae-b612-480e-ab21-11d1feecea62 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.116374] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed762bd-109c-4854-a770-e955c1e8a64b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.157320] env[61663]: DEBUG nova.policy [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0c23ced30f39496ba1c875aa48f5a44d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5179b3fdb634c1b804241610de771d4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1622.782198] env[61663]: DEBUG nova.network.neutron [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Successfully created port: 8501ab77-6158-4cec-a51f-0e75ef4e7595 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1624.203556] env[61663]: DEBUG nova.network.neutron [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Successfully updated port: 8501ab77-6158-4cec-a51f-0e75ef4e7595 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1624.213165] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquiring lock "refresh_cache-689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.213324] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 
tempest-TenantUsagesTestJSON-1156964182-project-member] Acquired lock "refresh_cache-689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.213476] env[61663]: DEBUG nova.network.neutron [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1624.294048] env[61663]: DEBUG nova.network.neutron [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1624.314168] env[61663]: DEBUG nova.compute.manager [req-bb6761d6-cf85-4ed7-bdfa-0ece0e10d806 req-e6c1cb98-72f9-45ef-baf6-2bd28800ec36 service nova] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Received event network-vif-plugged-8501ab77-6158-4cec-a51f-0e75ef4e7595 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1624.314168] env[61663]: DEBUG oslo_concurrency.lockutils [req-bb6761d6-cf85-4ed7-bdfa-0ece0e10d806 req-e6c1cb98-72f9-45ef-baf6-2bd28800ec36 service nova] Acquiring lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.314168] env[61663]: DEBUG oslo_concurrency.lockutils [req-bb6761d6-cf85-4ed7-bdfa-0ece0e10d806 req-e6c1cb98-72f9-45ef-baf6-2bd28800ec36 service nova] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.314952] env[61663]: DEBUG oslo_concurrency.lockutils [req-bb6761d6-cf85-4ed7-bdfa-0ece0e10d806 req-e6c1cb98-72f9-45ef-baf6-2bd28800ec36 service nova] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.315374] env[61663]: DEBUG nova.compute.manager [req-bb6761d6-cf85-4ed7-bdfa-0ece0e10d806 req-e6c1cb98-72f9-45ef-baf6-2bd28800ec36 service nova] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] No waiting events found dispatching network-vif-plugged-8501ab77-6158-4cec-a51f-0e75ef4e7595 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1624.315695] env[61663]: WARNING nova.compute.manager [req-bb6761d6-cf85-4ed7-bdfa-0ece0e10d806 req-e6c1cb98-72f9-45ef-baf6-2bd28800ec36 service nova] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Received unexpected event network-vif-plugged-8501ab77-6158-4cec-a51f-0e75ef4e7595 for instance with vm_state building and task_state spawning. 
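
The "Acquiring lock ... acquired ... released" triplets around the event dispatch above are emitted by oslo.concurrency's lockutils wrapper itself, not by Nova code. A minimal sketch of that pattern, using only the public oslo.concurrency API; the function name and lock name below are illustrative placeholders, not taken from this log:

    from oslo_concurrency import lockutils

    # lockutils logs the DEBUG lines seen above on entry and exit:
    #   Acquiring lock "..." by "..."
    #   Lock "..." acquired by "..." :: waited 0.000s
    #   Lock "..." "released" by "..." :: held 0.000s
    @lockutils.synchronized('689ea5bd-events')
    def pop_event():
        # Critical section: runs while the named in-process lock is held.
        return None

    # Equivalent context-manager form, producing the same log triplet:
    with lockutils.lock('689ea5bd-events'):
        pop_event()

Both forms serialize concurrent greenthreads on the same lock name, which is how the compute manager keeps per-instance event delivery ordered while the spawn proceeds.
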
[ 1624.638242] env[61663]: DEBUG nova.network.neutron [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Updating instance_info_cache with network_info: [{"id": "8501ab77-6158-4cec-a51f-0e75ef4e7595", "address": "fa:16:3e:18:b9:01", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.95", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8501ab77-61", "ovs_interfaceid": "8501ab77-6158-4cec-a51f-0e75ef4e7595", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.651354] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Releasing lock "refresh_cache-689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.651652] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Instance network_info: |[{"id": "8501ab77-6158-4cec-a51f-0e75ef4e7595", "address": "fa:16:3e:18:b9:01", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.95", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8501ab77-61", "ovs_interfaceid": "8501ab77-6158-4cec-a51f-0e75ef4e7595", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1624.652092] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 
tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:b9:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8501ab77-6158-4cec-a51f-0e75ef4e7595', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1624.659354] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Creating folder: Project (f5179b3fdb634c1b804241610de771d4). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1624.659884] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f14a12a-199a-4b2d-8240-25ff675f3e3a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1624.671613] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Created folder: Project (f5179b3fdb634c1b804241610de771d4) in parent group-v352575.
[ 1624.671915] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Creating folder: Instances. Parent ref: group-v352629. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1624.672035] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4682dc36-1c84-4621-b6c1-ce44160a6109 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1624.681094] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Created folder: Instances in parent group-v352629.
[ 1624.681395] env[61663]: DEBUG oslo.service.loopingcall [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1624.681587] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1624.681778] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e73a2d7-3058-42e2-9835-0335910b39bd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1624.701816] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1624.701816] env[61663]: value = "task-1690745"
[ 1624.701816] env[61663]: _type = "Task"
[ 1624.701816] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1624.710223] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690745, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1625.211944] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690745, 'name': CreateVM_Task, 'duration_secs': 0.301297} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1625.212255] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1625.212849] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1625.213016] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1625.213351] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1625.213605] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-557cbc6b-e308-46c9-8305-20901dcf6024 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1625.218492] env[61663]: DEBUG oslo_vmware.api [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Waiting for the task: (returnval){
[ 1625.218492] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f37a4b-9695-2b98-2838-aa245626cf6d"
[ 1625.218492] env[61663]: _type = "Task"
[ 1625.218492] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1625.227216] env[61663]: DEBUG oslo_vmware.api [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f37a4b-9695-2b98-2838-aa245626cf6d, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.729343] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.729683] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1625.729924] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.350775] env[61663]: DEBUG nova.compute.manager [req-d5baa887-b977-4f42-9480-4c511d757dd9 req-07dc17a3-1b75-4eaa-96dd-0a75ee15a1d3 service nova] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Received event network-changed-8501ab77-6158-4cec-a51f-0e75ef4e7595 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1626.351209] env[61663]: DEBUG nova.compute.manager [req-d5baa887-b977-4f42-9480-4c511d757dd9 req-07dc17a3-1b75-4eaa-96dd-0a75ee15a1d3 service nova] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Refreshing instance network info cache due to event network-changed-8501ab77-6158-4cec-a51f-0e75ef4e7595. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1626.351209] env[61663]: DEBUG oslo_concurrency.lockutils [req-d5baa887-b977-4f42-9480-4c511d757dd9 req-07dc17a3-1b75-4eaa-96dd-0a75ee15a1d3 service nova] Acquiring lock "refresh_cache-689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.351313] env[61663]: DEBUG oslo_concurrency.lockutils [req-d5baa887-b977-4f42-9480-4c511d757dd9 req-07dc17a3-1b75-4eaa-96dd-0a75ee15a1d3 service nova] Acquired lock "refresh_cache-689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.351460] env[61663]: DEBUG nova.network.neutron [req-d5baa887-b977-4f42-9480-4c511d757dd9 req-07dc17a3-1b75-4eaa-96dd-0a75ee15a1d3 service nova] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Refreshing network info cache for port 8501ab77-6158-4cec-a51f-0e75ef4e7595 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1626.766242] env[61663]: DEBUG nova.network.neutron [req-d5baa887-b977-4f42-9480-4c511d757dd9 req-07dc17a3-1b75-4eaa-96dd-0a75ee15a1d3 service nova] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Updated VIF entry in instance network info cache for port 8501ab77-6158-4cec-a51f-0e75ef4e7595. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1626.766611] env[61663]: DEBUG nova.network.neutron [req-d5baa887-b977-4f42-9480-4c511d757dd9 req-07dc17a3-1b75-4eaa-96dd-0a75ee15a1d3 service nova] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Updating instance_info_cache with network_info: [{"id": "8501ab77-6158-4cec-a51f-0e75ef4e7595", "address": "fa:16:3e:18:b9:01", "network": {"id": "42ed112c-bbbe-4843-b59c-6ca4f1421c97", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.95", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "cafa379ce6b143b88e4741a849af1088", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8501ab77-61", "ovs_interfaceid": "8501ab77-6158-4cec-a51f-0e75ef4e7595", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.775938] env[61663]: DEBUG oslo_concurrency.lockutils [req-d5baa887-b977-4f42-9480-4c511d757dd9 req-07dc17a3-1b75-4eaa-96dd-0a75ee15a1d3 service nova] Releasing lock "refresh_cache-689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.301588] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquiring lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.693533] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.692247] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.692501] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1649.692642] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1649.718250] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 
f9a675b6-e76d-492b-ac34-3c7b10553fca] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.718519] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.718586] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.718679] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.718833] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.718919] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 04488672-86c4-415b-961e-94641d570112] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.719799] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.720200] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.720200] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.720320] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1649.720374] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1651.468587] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.468912] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.693688] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1652.688264] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.692132] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1653.707578] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.708172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.708172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.708550] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1653.709807] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7f31bb-d6b5-4ff7-ace8-07bd7dcd1e6f {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.722124] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9e8176-6cd1-4545-b1d2-b74026196743 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.736154] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdade3a-36c1-4a0e-ade3-cb581c1b3bfc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.743101] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93df1861-7053-4fd1-a90c-f2f4b952d2c1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.786577] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181330MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1653.786778] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.787032] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.881229] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f9a675b6-e76d-492b-ac34-3c7b10553fca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.881229] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.881229] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.881229] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.881364] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.881364] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.881364] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.881364] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.881473] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.881473] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1653.893379] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 87c4b17f-9890-44fe-9974-0f6c45e316d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1653.905770] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 39380f25-15a5-4d8f-b38b-39e1b3561314 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1653.916932] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1653.932241] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 536c3f6e-757d-4b59-bf82-c01f735746d3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1653.946826] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 058b6485-898e-4799-899a-df5297144271 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1653.964208] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f2a87111-4361-4e0a-940c-3c163c2d5e72 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1653.978788] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 26e22311-811c-49cf-b2df-40822f2e4f3c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1653.995213] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f71fce22-f27b-4e5d-94e4-697d09377ed1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1654.012782] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 06030fd6-0e35-42dc-bd66-cfc95930e90a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1654.027600] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 65cdd238-4875-4dad-9df0-0aeda65ab9ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1654.040448] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance f3d817b9-1a93-4fb8-b25c-756de9152f17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1654.054516] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5c76183a-cdcb-49e7-95b9-75a635352479 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1654.065417] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 14682ec1-2d3f-4601-a48e-832e7f2072d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1654.080753] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1654.094389] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1654.112873] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1654.113244] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1654.113455] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1654.578986] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a1d9a2-806c-4374-ae5e-e66b2954a682 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.588259] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4337ccc-0a3e-4549-af3b-19c5a827a21b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.625284] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c32fd39-66df-4e10-b475-9d54dfaa0c38 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.636824] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36071583-1ccd-476e-9e1b-a6d3dd79a80f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.650846] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1654.683202] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1654.700175] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1654.700175] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.913s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.701442] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.702337] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.931871] env[61663]: DEBUG oslo_concurrency.lockutils [None req-bd6de649-c90a-41f0-897f-c87d7fe10e2e tempest-ServerAddressesNegativeTestJSON-2016992940 tempest-ServerAddressesNegativeTestJSON-2016992940-project-member] Acquiring lock "76a61ec9-99cb-4371-9e7e-dc206c0a9d3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.931871] env[61663]: DEBUG oslo_concurrency.lockutils [None req-bd6de649-c90a-41f0-897f-c87d7fe10e2e tempest-ServerAddressesNegativeTestJSON-2016992940 tempest-ServerAddressesNegativeTestJSON-2016992940-project-member] Lock "76a61ec9-99cb-4371-9e7e-dc206c0a9d3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.692683] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1658.693017] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1659.690878] env[61663]: DEBUG oslo_concurrency.lockutils [None req-38d90fad-8aec-4730-a31a-c9562e473210 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087-project-member] Acquiring lock "a63f3aaf-9a32-4782-94db-bfbbbd094530" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.691576] env[61663]: DEBUG oslo_concurrency.lockutils [None req-38d90fad-8aec-4730-a31a-c9562e473210 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087-project-member] Lock "a63f3aaf-9a32-4782-94db-bfbbbd094530" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.692603] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1662.687775] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1664.113971] env[61663]: DEBUG oslo_concurrency.lockutils [None req-760acb0b-205a-4de7-a63f-ca1dc75b6d7a tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquiring lock "4cc0a356-4bc7-4713-87af-5c5c7cc792d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.114309] env[61663]: DEBUG oslo_concurrency.lockutils [None req-760acb0b-205a-4de7-a63f-ca1dc75b6d7a tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "4cc0a356-4bc7-4713-87af-5c5c7cc792d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.331176] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e8d01236-f3c0-4a6c-9f03-3fa99139f420 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] Acquiring lock "5ee3346f-50bf-464e-a4dd-afd1edd0052a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.331410] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e8d01236-f3c0-4a6c-9f03-3fa99139f420 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] Lock "5ee3346f-50bf-464e-a4dd-afd1edd0052a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.239299] env[61663]: WARNING oslo_vmware.rw_handles [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1668.239299] env[61663]: ERROR oslo_vmware.rw_handles [ 1668.239845] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/b0555d28-d63e-4232-99ad-c590ac916bd9/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1668.242032] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1668.242345] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Copying Virtual Disk [datastore1] vmware_temp/b0555d28-d63e-4232-99ad-c590ac916bd9/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/b0555d28-d63e-4232-99ad-c590ac916bd9/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1668.242667] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45b4cb79-615e-410c-bc29-e89afc33dbac {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.251588] env[61663]: DEBUG oslo_vmware.api [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 
tempest-ServersTestFqdnHostnames-1601690275-project-member] Waiting for the task: (returnval){ [ 1668.251588] env[61663]: value = "task-1690749" [ 1668.251588] env[61663]: _type = "Task" [ 1668.251588] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.260355] env[61663]: DEBUG oslo_vmware.api [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Task: {'id': task-1690749, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.763049] env[61663]: DEBUG oslo_vmware.exceptions [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1668.763366] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.763919] env[61663]: ERROR nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1668.763919] env[61663]: Faults: ['InvalidArgument'] [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Traceback (most recent call last): [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] yield resources [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] self.driver.spawn(context, instance, image_meta, [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] self._fetch_image_if_missing(context, vi) [ 1668.763919] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] image_cache(vi, tmp_image_ds_loc) [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] vm_util.copy_virtual_disk( [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] session._wait_for_task(vmdk_copy_task) [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] return self.wait_for_task(task_ref) [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] return evt.wait() [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] result = hub.switch() [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1668.764384] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] return self.greenlet.switch() [ 1668.764769] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1668.764769] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] self.f(*self.args, **self.kw) [ 1668.764769] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1668.764769] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] raise exceptions.translate_fault(task_info.error) [ 1668.764769] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1668.764769] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Faults: ['InvalidArgument'] [ 1668.764769] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] [ 1668.764769] env[61663]: INFO nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Terminating instance [ 1668.765972] env[61663]: 
DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.766191] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1668.766852] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1668.767062] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1668.767294] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be120dc7-5176-4c61-85a3-d5d48b2da038 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.770216] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a904d4e8-6c31-4ad4-ab5f-f7a6c457aab9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.777086] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1668.777307] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37342207-982d-4974-8eb6-c503378c7afd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.785725] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1668.785917] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1668.786636] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf8c5c6c-3b07-4602-a0de-da5eb31e55c6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.791793] env[61663]: DEBUG oslo_vmware.api [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for the task: (returnval){ [ 1668.791793] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52038b21-13a0-bb52-279e-1a120ce8dee7" [ 1668.791793] env[61663]: _type = "Task" [ 1668.791793] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.799050] env[61663]: DEBUG oslo_vmware.api [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52038b21-13a0-bb52-279e-1a120ce8dee7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.866782] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1668.867114] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1668.867406] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Deleting the datastore file [datastore1] f9a675b6-e76d-492b-ac34-3c7b10553fca {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1668.867693] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36e0ab7b-31af-4f61-836b-160488ee132f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.874214] env[61663]: DEBUG oslo_vmware.api [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Waiting for the task: (returnval){ [ 1668.874214] env[61663]: value = "task-1690752" [ 1668.874214] env[61663]: _type = "Task" [ 1668.874214] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.882064] env[61663]: DEBUG oslo_vmware.api [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Task: {'id': task-1690752, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.302694] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1669.302985] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Creating directory with path [datastore1] vmware_temp/4faad1d3-4a3d-46a9-aed7-dd33b7d12863/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1669.303201] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b91fdbf4-7692-4502-9473-a80bb21200ce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.315035] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Created directory with path [datastore1] vmware_temp/4faad1d3-4a3d-46a9-aed7-dd33b7d12863/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1669.315257] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Fetch image to [datastore1] vmware_temp/4faad1d3-4a3d-46a9-aed7-dd33b7d12863/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1669.315432] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/4faad1d3-4a3d-46a9-aed7-dd33b7d12863/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1669.316205] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464cb8e5-c609-4c06-9c9b-3e0514fd1d74 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.326383] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b01ed25-59e1-49a5-a0a9-bd44386cd561 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.338893] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13271332-da86-42ec-95ec-53613336ffb6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.372701] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e142069-d333-4425-bb8f-bada1a52d253 {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.383482] env[61663]: DEBUG oslo_vmware.api [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Task: {'id': task-1690752, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201194} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.385027] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1669.385207] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1669.385406] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1669.385596] env[61663]: INFO nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 1669.387566] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-10116d44-7d2f-4e4c-84b9-73ba3295998c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.389465] env[61663]: DEBUG nova.compute.claims [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1669.389637] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.389857] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.410539] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1669.461274] env[61663]: DEBUG oslo_vmware.rw_handles [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4faad1d3-4a3d-46a9-aed7-dd33b7d12863/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1669.525105] env[61663]: DEBUG oslo_vmware.rw_handles [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1669.525266] env[61663]: DEBUG oslo_vmware.rw_handles [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4faad1d3-4a3d-46a9-aed7-dd33b7d12863/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1669.828506] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0285386-0c7d-4f9a-a4e3-333964db943e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.836243] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29449d35-d759-4a3c-a2b1-4885415da497 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.867023] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048f19ad-5c31-49a8-b9f9-23929d920282 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.874620] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe538dc-3756-4a1e-b169-66b2a0770f81 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.887172] env[61663]: DEBUG nova.compute.provider_tree [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1669.896980] env[61663]: DEBUG nova.scheduler.client.report [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1669.912838] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.523s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.913140] env[61663]: ERROR nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1669.913140] env[61663]: Faults: ['InvalidArgument'] [ 1669.913140] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Traceback (most recent call last): [ 1669.913140] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1669.913140] 
env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] self.driver.spawn(context, instance, image_meta, [ 1669.913140] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1669.913140] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1669.913140] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1669.913140] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] self._fetch_image_if_missing(context, vi) [ 1669.913140] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1669.913140] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] image_cache(vi, tmp_image_ds_loc) [ 1669.913140] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] vm_util.copy_virtual_disk( [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] session._wait_for_task(vmdk_copy_task) [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] return self.wait_for_task(task_ref) [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] return evt.wait() [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] result = hub.switch() [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] return self.greenlet.switch() [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1669.913512] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] self.f(*self.args, **self.kw) [ 1669.913847] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1669.913847] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] raise exceptions.translate_fault(task_info.error) [ 1669.913847] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1669.913847] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Faults: ['InvalidArgument'] [ 1669.913847] env[61663]: ERROR nova.compute.manager [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] [ 1669.913847] env[61663]: DEBUG nova.compute.utils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1669.915450] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Build of instance f9a675b6-e76d-492b-ac34-3c7b10553fca was re-scheduled: A specified parameter was not correct: fileType [ 1669.915450] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1669.915819] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1669.915990] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1669.916180] env[61663]: DEBUG nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1669.916347] env[61663]: DEBUG nova.network.neutron [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1670.368506] env[61663]: DEBUG nova.network.neutron [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.384023] env[61663]: INFO nova.compute.manager [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Took 0.47 seconds to deallocate network for instance. [ 1670.504362] env[61663]: INFO nova.scheduler.client.report [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Deleted allocations for instance f9a675b6-e76d-492b-ac34-3c7b10553fca [ 1670.532635] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4256713a-3773-4d79-864f-bb8467b4c70c tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "f9a675b6-e76d-492b-ac34-3c7b10553fca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 421.316s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.534160] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "f9a675b6-e76d-492b-ac34-3c7b10553fca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 221.120s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.534654] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Acquiring lock "f9a675b6-e76d-492b-ac34-3c7b10553fca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.534654] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "f9a675b6-e76d-492b-ac34-3c7b10553fca-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.534766] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "f9a675b6-e76d-492b-ac34-3c7b10553fca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.536789] env[61663]: INFO nova.compute.manager [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Terminating instance [ 1670.538613] env[61663]: DEBUG nova.compute.manager [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1670.538813] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1670.539366] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e657ccc-489e-47e5-8677-e66613e4f62c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.548725] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c6f762-67d2-4d14-b9e5-adc5190b2c1d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.561045] env[61663]: DEBUG nova.compute.manager [None req-0d29d93f-4437-4922-93b2-096a8d63acf9 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] [instance: 87c4b17f-9890-44fe-9974-0f6c45e316d3] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1670.582262] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f9a675b6-e76d-492b-ac34-3c7b10553fca could not be found. 
[ 1670.582262] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1670.582783] env[61663]: INFO nova.compute.manager [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1670.582783] env[61663]: DEBUG oslo.service.loopingcall [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1670.582984] env[61663]: DEBUG nova.compute.manager [-] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1670.583091] env[61663]: DEBUG nova.network.neutron [-] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1670.596047] env[61663]: DEBUG nova.compute.manager [None req-0d29d93f-4437-4922-93b2-096a8d63acf9 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] [instance: 87c4b17f-9890-44fe-9974-0f6c45e316d3] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1670.617358] env[61663]: DEBUG nova.network.neutron [-] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.626875] env[61663]: INFO nova.compute.manager [-] [instance: f9a675b6-e76d-492b-ac34-3c7b10553fca] Took 0.04 seconds to deallocate network for instance. [ 1670.635665] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0d29d93f-4437-4922-93b2-096a8d63acf9 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] Lock "87c4b17f-9890-44fe-9974-0f6c45e316d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.793s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.647584] env[61663]: DEBUG nova.compute.manager [None req-f3e3a4d9-40d7-46a6-984d-99ddae74823a tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] [instance: 39380f25-15a5-4d8f-b38b-39e1b3561314] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1670.686091] env[61663]: DEBUG nova.compute.manager [None req-f3e3a4d9-40d7-46a6-984d-99ddae74823a tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] [instance: 39380f25-15a5-4d8f-b38b-39e1b3561314] Instance disappeared before build. 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1670.714579] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f3e3a4d9-40d7-46a6-984d-99ddae74823a tempest-MigrationsAdminTest-364237900 tempest-MigrationsAdminTest-364237900-project-member] Lock "39380f25-15a5-4d8f-b38b-39e1b3561314" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.587s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.726364] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1670.795606] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b9156ad7-c30a-4486-8c9c-67f574100745 tempest-ServersTestFqdnHostnames-1601690275 tempest-ServersTestFqdnHostnames-1601690275-project-member] Lock "f9a675b6-e76d-492b-ac34-3c7b10553fca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.260s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.814345] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.814657] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.816151] env[61663]: INFO nova.compute.claims [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1671.223868] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae17205a-1403-4f3c-a77c-4b47881f3571 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.232669] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244b4b1d-f0e1-4a16-bc94-1bf6bcec4a42 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.264314] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7cfbdd-c034-4b11-846c-49427268b576 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.272698] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd58ce40-7a9b-4f3d-8774-5e1f5fd3c203 {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.287297] env[61663]: DEBUG nova.compute.provider_tree [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1671.296636] env[61663]: DEBUG nova.scheduler.client.report [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1671.310266] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.495s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.310716] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1671.349892] env[61663]: DEBUG nova.compute.utils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1671.351143] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1671.351316] env[61663]: DEBUG nova.network.neutron [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1671.363507] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Start building block device mappings for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1671.439133] env[61663]: DEBUG nova.policy [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6e1a7493d26c487ca0f096b8e780c385', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '37334157e802464c979032e60814d5f6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1671.448876] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1671.477239] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1671.477494] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1671.477652] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1671.477834] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1671.477981] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1671.478145] env[61663]: DEBUG nova.virt.hardware [None 
req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1671.478358] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1671.478517] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1671.478687] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1671.478853] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1671.479047] env[61663]: DEBUG nova.virt.hardware [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1671.479910] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83154931-0c63-426c-9f25-a42b9f452cb3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.488684] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2523da-e936-42a5-97bf-5a0a797939f9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.557117] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.990407] env[61663]: DEBUG nova.network.neutron [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Successfully created port: 5b17b55d-b020-4261-96c0-f4bbe4708130 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1673.229377] env[61663]: DEBUG nova.compute.manager 
[req-37c075e6-5e9c-408f-9342-2e27281749ec req-6fae7be2-8657-45a7-add3-486930cee33e service nova] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Received event network-vif-plugged-5b17b55d-b020-4261-96c0-f4bbe4708130 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1673.229601] env[61663]: DEBUG oslo_concurrency.lockutils [req-37c075e6-5e9c-408f-9342-2e27281749ec req-6fae7be2-8657-45a7-add3-486930cee33e service nova] Acquiring lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.229743] env[61663]: DEBUG oslo_concurrency.lockutils [req-37c075e6-5e9c-408f-9342-2e27281749ec req-6fae7be2-8657-45a7-add3-486930cee33e service nova] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.229862] env[61663]: DEBUG oslo_concurrency.lockutils [req-37c075e6-5e9c-408f-9342-2e27281749ec req-6fae7be2-8657-45a7-add3-486930cee33e service nova] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.230034] env[61663]: DEBUG nova.compute.manager [req-37c075e6-5e9c-408f-9342-2e27281749ec req-6fae7be2-8657-45a7-add3-486930cee33e service nova] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] No waiting events found dispatching network-vif-plugged-5b17b55d-b020-4261-96c0-f4bbe4708130 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1673.230474] env[61663]: WARNING nova.compute.manager [req-37c075e6-5e9c-408f-9342-2e27281749ec req-6fae7be2-8657-45a7-add3-486930cee33e service nova] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Received unexpected event network-vif-plugged-5b17b55d-b020-4261-96c0-f4bbe4708130 for instance with vm_state building and task_state deleting. 
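Annotation — the "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" triples throughout this trace come from oslo.concurrency's named-lock helpers. A minimal sketch of that pattern (illustrative only; the instance UUID is taken from the log above and the function bodies are placeholders, not Nova's actual code):

    from oslo_concurrency import lockutils

    # Decorator form: serializes all build work for one instance UUID,
    # and logs the acquire/waited/held lines at DEBUG, as seen above.
    @lockutils.synchronized('ef8528db-1338-4af6-9d4a-5eda7fe69a98', fair=True)
    def _locked_do_build_and_run_instance():
        pass  # build_and_run_instance body would run here

    # Context-manager form: short critical section, as used around the
    # "compute_resources" lock by the resource tracker's instance_claim.
    def instance_claim():
        with lockutils.lock('compute_resources'):
            pass  # claim CPU/RAM/disk against the compute node here

    _locked_do_build_and_run_instance()
    instance_claim()

The "waited"/"held" durations in the log are measured inside these helpers, which is why a long-running build shows up as a multi-minute "held" time on release (e.g. the 197.587s hold above).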
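Annotation — the network-vif-plugged sequence above (acquire the "...-events" lock, pop, "No waiting events found dispatching ...", then the WARNING about an unexpected event) is the external-event handshake between Neutron and nova-compute: the spawn thread normally registers a waiter for the VIF-plug event, and the event callback wakes it. Here the instance is already in task_state deleting, so no waiter exists and the event is flagged as unexpected. A stdlib-only sketch of that dispatch idea (this is a simplification, not Nova's InstanceEvents implementation):

    import threading

    _waiters = {}          # (instance_uuid, event_name) -> threading.Event
    _waiters_lock = threading.Lock()   # mirrors the "...-events" lock above

    def prepare(instance_uuid, event_name):
        # Called by the spawn path before it blocks on the VIF plug.
        ev = threading.Event()
        with _waiters_lock:
            _waiters[(instance_uuid, event_name)] = ev
        return ev

    def dispatch(instance_uuid, event_name):
        # Called when the external event arrives from Neutron.
        with _waiters_lock:
            ev = _waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # No one is waiting -> the "Received unexpected event" WARNING path.
            print('Received unexpected event %s for instance %s'
                  % (event_name, instance_uuid))
        else:
            ev.set()   # wakes the thread blocked in prepare()'s ev.wait()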
[ 1673.239328] env[61663]: DEBUG nova.network.neutron [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Successfully updated port: 5b17b55d-b020-4261-96c0-f4bbe4708130 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1673.249828] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "refresh_cache-ef8528db-1338-4af6-9d4a-5eda7fe69a98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.249964] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquired lock "refresh_cache-ef8528db-1338-4af6-9d4a-5eda7fe69a98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.250147] env[61663]: DEBUG nova.network.neutron [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1673.319326] env[61663]: DEBUG nova.network.neutron [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1673.604779] env[61663]: DEBUG nova.network.neutron [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Updating instance_info_cache with network_info: [{"id": "5b17b55d-b020-4261-96c0-f4bbe4708130", "address": "fa:16:3e:f6:20:e7", "network": {"id": "ca5bba34-2f43-4a1b-84a3-25f2605d1210", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-902918223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37334157e802464c979032e60814d5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "136c3499-9ca0-4f85-903d-1f194aa66ed9", "external-id": "nsx-vlan-transportzone-307", "segmentation_id": 307, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b17b55d-b0", "ovs_interfaceid": "5b17b55d-b020-4261-96c0-f4bbe4708130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.618334] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Releasing lock "refresh_cache-ef8528db-1338-4af6-9d4a-5eda7fe69a98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.618609] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Instance network_info: |[{"id": "5b17b55d-b020-4261-96c0-f4bbe4708130", "address": "fa:16:3e:f6:20:e7", "network": {"id": "ca5bba34-2f43-4a1b-84a3-25f2605d1210", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-902918223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37334157e802464c979032e60814d5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "136c3499-9ca0-4f85-903d-1f194aa66ed9", "external-id": "nsx-vlan-transportzone-307", "segmentation_id": 307, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b17b55d-b0", "ovs_interfaceid": "5b17b55d-b020-4261-96c0-f4bbe4708130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1673.619009] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:20:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '136c3499-9ca0-4f85-903d-1f194aa66ed9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b17b55d-b020-4261-96c0-f4bbe4708130', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1673.626541] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Creating folder: Project (37334157e802464c979032e60814d5f6). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1673.627046] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8daf1361-4a51-4057-a8bd-3f9f574e69d5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.637283] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Created folder: Project (37334157e802464c979032e60814d5f6) in parent group-v352575. [ 1673.637458] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Creating folder: Instances. Parent ref: group-v352635. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1673.637662] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e08d32b0-b11e-47da-ba45-837cedf6f537 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.646744] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Created folder: Instances in parent group-v352635. [ 1673.646964] env[61663]: DEBUG oslo.service.loopingcall [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.647151] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1673.647331] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aee8c7a9-8e71-4359-8ac2-5dd36922469b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.667311] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1673.667311] env[61663]: value = "task-1690757" [ 1673.667311] env[61663]: _type = "Task" [ 1673.667311] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.674662] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690757, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.177259] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690757, 'name': CreateVM_Task, 'duration_secs': 0.303756} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.177437] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1674.178160] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.178327] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.178683] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1674.178941] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-927c1581-a070-4466-aed8-ec30aa6d7c34 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.183932] env[61663]: DEBUG oslo_vmware.api [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Waiting for the task: (returnval){ [ 1674.183932] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c310ea-7919-8e6d-89b6-a8ce569c7a28" [ 1674.183932] env[61663]: _type = "Task" [ 1674.183932] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.191725] env[61663]: DEBUG oslo_vmware.api [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c310ea-7919-8e6d-89b6-a8ce569c7a28, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.694356] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.694700] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.694821] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.262557] env[61663]: DEBUG nova.compute.manager [req-71cddc6e-030a-4be0-b9c6-a6ae236ad5dd req-bc413a99-0a9b-4262-8774-43d4f1798334 service nova] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Received event network-changed-5b17b55d-b020-4261-96c0-f4bbe4708130 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1675.262788] env[61663]: DEBUG nova.compute.manager [req-71cddc6e-030a-4be0-b9c6-a6ae236ad5dd req-bc413a99-0a9b-4262-8774-43d4f1798334 service nova] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Refreshing instance network info cache due to event network-changed-5b17b55d-b020-4261-96c0-f4bbe4708130. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1675.263013] env[61663]: DEBUG oslo_concurrency.lockutils [req-71cddc6e-030a-4be0-b9c6-a6ae236ad5dd req-bc413a99-0a9b-4262-8774-43d4f1798334 service nova] Acquiring lock "refresh_cache-ef8528db-1338-4af6-9d4a-5eda7fe69a98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.263167] env[61663]: DEBUG oslo_concurrency.lockutils [req-71cddc6e-030a-4be0-b9c6-a6ae236ad5dd req-bc413a99-0a9b-4262-8774-43d4f1798334 service nova] Acquired lock "refresh_cache-ef8528db-1338-4af6-9d4a-5eda7fe69a98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.263329] env[61663]: DEBUG nova.network.neutron [req-71cddc6e-030a-4be0-b9c6-a6ae236ad5dd req-bc413a99-0a9b-4262-8774-43d4f1798334 service nova] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Refreshing network info cache for port 5b17b55d-b020-4261-96c0-f4bbe4708130 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1675.714766] env[61663]: DEBUG nova.network.neutron [req-71cddc6e-030a-4be0-b9c6-a6ae236ad5dd req-bc413a99-0a9b-4262-8774-43d4f1798334 service nova] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Updated VIF entry in instance network info cache for port 5b17b55d-b020-4261-96c0-f4bbe4708130. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1675.715213] env[61663]: DEBUG nova.network.neutron [req-71cddc6e-030a-4be0-b9c6-a6ae236ad5dd req-bc413a99-0a9b-4262-8774-43d4f1798334 service nova] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Updating instance_info_cache with network_info: [{"id": "5b17b55d-b020-4261-96c0-f4bbe4708130", "address": "fa:16:3e:f6:20:e7", "network": {"id": "ca5bba34-2f43-4a1b-84a3-25f2605d1210", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-902918223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "37334157e802464c979032e60814d5f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "136c3499-9ca0-4f85-903d-1f194aa66ed9", "external-id": "nsx-vlan-transportzone-307", "segmentation_id": 307, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b17b55d-b0", "ovs_interfaceid": "5b17b55d-b020-4261-96c0-f4bbe4708130", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.724706] env[61663]: DEBUG oslo_concurrency.lockutils [req-71cddc6e-030a-4be0-b9c6-a6ae236ad5dd req-bc413a99-0a9b-4262-8774-43d4f1798334 service nova] Releasing lock "refresh_cache-ef8528db-1338-4af6-9d4a-5eda7fe69a98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.955899] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Acquiring lock "b583b039-84c7-4168-91a1-82821c0001a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.955899] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "b583b039-84c7-4168-91a1-82821c0001a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.641353] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquiring lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.641353] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 
tempest-ServersNegativeTestJSON-287410550-project-member] Lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.209804] env[61663]: DEBUG oslo_concurrency.lockutils [None req-64ed4f58-b531-489e-b926-8348f547cd97 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "a0399e6e-6b1a-4702-870d-d9644c3d6545" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.210215] env[61663]: DEBUG oslo_concurrency.lockutils [None req-64ed4f58-b531-489e-b926-8348f547cd97 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "a0399e6e-6b1a-4702-870d-d9644c3d6545" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.941574] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c0ddae20-0dff-4002-8703-f2927a609081 tempest-ServersV294TestFqdnHostnames-638770955 tempest-ServersV294TestFqdnHostnames-638770955-project-member] Acquiring lock "6d0f9509-1e63-4da8-a92b-9393a7cb4dff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1694.941914] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c0ddae20-0dff-4002-8703-f2927a609081 tempest-ServersV294TestFqdnHostnames-638770955 tempest-ServersV294TestFqdnHostnames-638770955-project-member] Lock "6d0f9509-1e63-4da8-a92b-9393a7cb4dff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.973445] env[61663]: DEBUG oslo_concurrency.lockutils [None req-353b30da-1798-4590-baf4-75a85e3180b2 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "b1eece3b-003c-46ea-944d-ccac01ca4ba9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.973757] env[61663]: DEBUG oslo_concurrency.lockutils [None req-353b30da-1798-4590-baf4-75a85e3180b2 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "b1eece3b-003c-46ea-944d-ccac01ca4ba9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.692333] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1709.692564] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting 
heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1709.692658] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1709.716393] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.716393] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.716393] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.716393] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.716393] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 04488672-86c4-415b-961e-94641d570112] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.716670] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.716670] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.716726] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.716847] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.716984] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1709.717078] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1709.717636] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1711.861730] env[61663]: DEBUG oslo_concurrency.lockutils [None req-097ed397-2179-4bad-8690-4952b82f6804 tempest-ServerActionsTestOtherA-984726438 tempest-ServerActionsTestOtherA-984726438-project-member] Acquiring lock "56dc70b4-ebff-42c5-bbdc-bf7ca4a7c73d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.862017] env[61663]: DEBUG oslo_concurrency.lockutils [None req-097ed397-2179-4bad-8690-4952b82f6804 tempest-ServerActionsTestOtherA-984726438 tempest-ServerActionsTestOtherA-984726438-project-member] Lock "56dc70b4-ebff-42c5-bbdc-bf7ca4a7c73d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.692113] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1712.692628] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.691940] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1714.702965] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.703218] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.703384] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.703540] 
env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1714.704755] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea04f850-96cb-44ff-868f-8096b54c6011 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.714811] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc849009-7dd9-452d-b169-2501d1104ae6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.730228] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c72f2c-c631-4f23-bf38-52cf815c5a1f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.737042] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f141243e-58ba-42ca-9006-664b0f5a656c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.769471] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181266MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1714.771464] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.771464] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.845176] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.845176] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.845176] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.845176] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.845389] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.845389] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.845389] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.845389] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.845502] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.845502] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1714.857306] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.873583] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.884608] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.895292] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 76a61ec9-99cb-4371-9e7e-dc206c0a9d3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.906227] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance a63f3aaf-9a32-4782-94db-bfbbbd094530 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.916853] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 4cc0a356-4bc7-4713-87af-5c5c7cc792d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.925701] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5ee3346f-50bf-464e-a4dd-afd1edd0052a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.935115] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b583b039-84c7-4168-91a1-82821c0001a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.945683] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.958103] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance a0399e6e-6b1a-4702-870d-d9644c3d6545 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.984955] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6d0f9509-1e63-4da8-a92b-9393a7cb4dff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.999598] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b1eece3b-003c-46ea-944d-ccac01ca4ba9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1715.014239] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 56dc70b4-ebff-42c5-bbdc-bf7ca4a7c73d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1715.014533] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1715.014689] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1715.341804] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9db3a9-3486-4369-a996-06c8a2e21289 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.350224] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658b40f0-343d-4620-bf32-b99f4133ee6b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.380573] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261d2f71-9090-4393-ae94-76aabad773a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.387977] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9003d0f1-e2bc-49ae-95d1-ee22ecf47699 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.401013] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1715.416164] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1715.431350] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1715.431547] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.662s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.432410] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1717.432685] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1718.692979] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1718.692979] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1719.449420] env[61663]: WARNING oslo_vmware.rw_handles [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1719.449420] env[61663]: ERROR oslo_vmware.rw_handles [ 1719.450028] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/4faad1d3-4a3d-46a9-aed7-dd33b7d12863/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1719.452087] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1719.452343] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Copying Virtual Disk [datastore1] vmware_temp/4faad1d3-4a3d-46a9-aed7-dd33b7d12863/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/4faad1d3-4a3d-46a9-aed7-dd33b7d12863/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1719.452627] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c4b8fc6-abae-4a3b-976e-e9088446b5c0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.460737] env[61663]: DEBUG oslo_vmware.api [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for the task: (returnval){ [ 1719.460737] env[61663]: value = "task-1690762" [ 1719.460737] env[61663]: _type = "Task" [ 1719.460737] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.468663] env[61663]: DEBUG oslo_vmware.api [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': task-1690762, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.971678] env[61663]: DEBUG oslo_vmware.exceptions [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1719.972030] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.972494] env[61663]: ERROR nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1719.972494] env[61663]: Faults: ['InvalidArgument'] [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Traceback (most recent call last): [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] yield resources [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] self.driver.spawn(context, instance, image_meta, [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] self._fetch_image_if_missing(context, vi) [ 1719.972494] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] image_cache(vi, tmp_image_ds_loc) [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] vm_util.copy_virtual_disk( [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] session._wait_for_task(vmdk_copy_task) [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] return self.wait_for_task(task_ref) [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] return evt.wait() [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] result = hub.switch() [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1719.972918] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] return self.greenlet.switch() [ 1719.973332] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1719.973332] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] self.f(*self.args, **self.kw) [ 1719.973332] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1719.973332] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] raise exceptions.translate_fault(task_info.error) [ 1719.973332] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1719.973332] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Faults: ['InvalidArgument'] [ 1719.973332] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] [ 1719.973332] env[61663]: INFO nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Terminating instance [ 1719.974486] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.974694] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1719.975347] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] 
[instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1719.975537] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1719.975761] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d441235-25a1-4567-9bb5-7b3d5a24759c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.978206] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa294fbb-c691-4267-851d-ba73c0ea6fc2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.985123] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1719.985419] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3393691f-67df-4f2a-82ce-04bf402890f8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.987727] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1719.987901] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1719.988835] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb18f8c0-084a-46c7-8adc-2689c437f91d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.993408] env[61663]: DEBUG oslo_vmware.api [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Waiting for the task: (returnval){ [ 1719.993408] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522d4d29-b8ab-2b52-fb58-870a7815dd97" [ 1719.993408] env[61663]: _type = "Task" [ 1719.993408] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.000699] env[61663]: DEBUG oslo_vmware.api [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522d4d29-b8ab-2b52-fb58-870a7815dd97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.074734] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1720.074949] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1720.075171] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Deleting the datastore file [datastore1] 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1720.075586] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bdeef15-1119-4ea5-8964-d0f5d723658b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.082252] env[61663]: DEBUG oslo_vmware.api [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for the task: (returnval){ [ 1720.082252] env[61663]: value = "task-1690764" [ 1720.082252] env[61663]: _type = "Task" [ 1720.082252] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.090866] env[61663]: DEBUG oslo_vmware.api [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': task-1690764, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.504192] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1720.504417] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Creating directory with path [datastore1] vmware_temp/894eab83-72be-4f0f-89d3-9a4508245830/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1720.504654] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-694c6a76-c9b6-4be6-85c6-1e5b09935b48 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.516086] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Created directory with path [datastore1] vmware_temp/894eab83-72be-4f0f-89d3-9a4508245830/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1720.516274] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Fetch image to [datastore1] vmware_temp/894eab83-72be-4f0f-89d3-9a4508245830/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1720.516442] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/894eab83-72be-4f0f-89d3-9a4508245830/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1720.517194] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715bf46b-bebc-48ca-b644-27279cb5512b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.523519] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df43a6a-df8a-4e30-8f01-a3be2c8af3d9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.532126] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf57d5d4-7ca8-40a6-982c-632637db3bd1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.561506] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-293bba5d-d1c8-45c0-8341-b91f58add7b3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.567085] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b1476f67-65a1-421e-8f36-1b5ac3cde7cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.590729] env[61663]: DEBUG oslo_vmware.api [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Task: {'id': task-1690764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070745} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.592122] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1720.592321] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1720.592505] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1720.592691] env[61663]: INFO nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Took 0.62 seconds to destroy the instance on the hypervisor. 
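The failed CopyVirtualDisk_Task above shows the pattern oslo.vmware uses for long-running vCenter operations: the SOAP call returns a task handle immediately, and the caller polls the task's info until it reaches a terminal state, raising a translated exception when the task errors out (the "raise exceptions.translate_fault(task_info.error)" frame in the traceback, which here becomes the VimFaultException with Faults: ['InvalidArgument']). A minimal self-contained sketch of that polling loop follows; it uses a stand-in task object and a stand-in exception class, not the real oslo_vmware or pyVmomi types:

    import time

    class VimFaultError(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, msg, fault_list):
            super().__init__(msg)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, interval=0.5):
        # Poll until the task leaves its queued/running states, mirroring
        # the "_poll_task ... progress is 0%" lines in the log above.
        while True:
            info = get_task_info()
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # Surface the VIM fault to the caller, analogous to
                # exceptions.translate_fault(task_info.error) in oslo_vmware.
                raise VimFaultError(info["error"]["localizedMessage"],
                                    info["error"]["faults"])
            time.sleep(interval)

    # Example: a task that fails the way task-1690762 does in this log.
    states = iter([
        {"state": "running", "progress": 0},
        {"state": "error", "error": {
            "localizedMessage": "A specified parameter was not correct: fileType",
            "faults": ["InvalidArgument"]}},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0.01)
    except VimFaultError as exc:
        print(exc, exc.fault_list)

Note how the spawn path then proceeds to cleanup: the same polling primitive drives DeleteDatastoreFile_Task (task-1690764), so the error handling and the teardown share one waiting mechanism.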
[ 1720.594508] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1720.596478] env[61663]: DEBUG nova.compute.claims [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1720.596646] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.596855] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.650110] env[61663]: DEBUG oslo_vmware.rw_handles [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/894eab83-72be-4f0f-89d3-9a4508245830/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1720.711619] env[61663]: DEBUG oslo_vmware.rw_handles [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1720.711815] env[61663]: DEBUG oslo_vmware.rw_handles [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/894eab83-72be-4f0f-89d3-9a4508245830/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1720.956825] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994f68e6-b1b5-49f8-ae50-cdc5460c9530 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.965456] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ef8ac8-78fb-4deb-a5cd-a692d1ede64b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.647689] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb500f44-f08c-4da1-bd29-45372fb879d4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.655133] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabd4ef4-da92-4414-a0c6-7a9392909ac4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.667834] env[61663]: DEBUG nova.compute.provider_tree [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.676857] env[61663]: DEBUG nova.scheduler.client.report [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1721.690320] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.093s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.690847] env[61663]: ERROR nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1721.690847] env[61663]: Faults: ['InvalidArgument'] [ 1721.690847] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Traceback (most recent call last): [ 1721.690847] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1721.690847] env[61663]: ERROR 
nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] self.driver.spawn(context, instance, image_meta, [ 1721.690847] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1721.690847] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1721.690847] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1721.690847] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] self._fetch_image_if_missing(context, vi) [ 1721.690847] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1721.690847] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] image_cache(vi, tmp_image_ds_loc) [ 1721.690847] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] vm_util.copy_virtual_disk( [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] session._wait_for_task(vmdk_copy_task) [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] return self.wait_for_task(task_ref) [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] return evt.wait() [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] result = hub.switch() [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] return self.greenlet.switch() [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1721.691314] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] self.f(*self.args, **self.kw) [ 1721.691663] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1721.691663] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] raise exceptions.translate_fault(task_info.error) [ 1721.691663] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1721.691663] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Faults: ['InvalidArgument'] [ 1721.691663] env[61663]: ERROR nova.compute.manager [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] [ 1721.691966] env[61663]: DEBUG nova.compute.utils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1721.693297] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1721.695712] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Build of instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd was re-scheduled: A specified parameter was not correct: fileType [ 1721.695712] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1721.696109] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1721.696287] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1721.696452] env[61663]: DEBUG nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1721.696613] env[61663]: DEBUG nova.network.neutron [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1722.350443] env[61663]: DEBUG nova.network.neutron [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.367082] env[61663]: INFO nova.compute.manager [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Took 0.67 seconds to deallocate network for instance. [ 1722.467612] env[61663]: INFO nova.scheduler.client.report [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Deleted allocations for instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd [ 1722.490544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-56e6181e-be17-4f38-bc5e-c81d95dd24a3 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 469.541s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.492973] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 271.945s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.492973] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Acquiring lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.492973] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.493153] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.494840] env[61663]: INFO nova.compute.manager [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Terminating instance [ 1722.496653] env[61663]: DEBUG nova.compute.manager [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1722.496845] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1722.497334] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45d51df7-4051-490d-b7f0-f14088dd60c5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.506400] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2725acf6-ecf8-4f24-8aca-69585d3dcffa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.517623] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: 536c3f6e-757d-4b59-bf82-c01f735746d3] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1722.537511] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd could not be found. [ 1722.537717] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1722.537893] env[61663]: INFO nova.compute.manager [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Took 0.04 seconds to destroy the instance on the hypervisor. 
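The second terminate at [ 1722.537511] illustrates why the driver's destroy path has to be idempotent: the earlier failed spawn already unregistered the VM and deleted its datastore directory, so by the time the tempest test issues its own delete the backend object is gone. vmops logs InstanceNotFound only as a warning and still reports the instance destroyed. A rough sketch of that tolerate-missing pattern (the backend and helper names here are illustrative, not Nova's actual API):

    class InstanceNotFound(Exception):
        pass

    def destroy_instance(backend, instance_id):
        """Idempotent destroy: a missing backend VM is success, not failure."""
        try:
            vm = backend.lookup(instance_id)
        except InstanceNotFound:
            # Matches "Instance does not exist on backend" in the log: a
            # failed spawn's cleanup (or a concurrent delete) already removed
            # the VM, so there is nothing left to tear down.
            return
        backend.unregister(vm)
        backend.delete_files(vm)

    # Usage: destroying an already-gone instance completes quietly.
    class _GoneBackend:
        def lookup(self, instance_id):
            raise InstanceNotFound(instance_id)

    destroy_instance(_GoneBackend(), "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd")

Treating "already gone" as success is what lets the reschedule/terminate race in this log resolve cleanly instead of failing the delete request.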
[ 1722.538326] env[61663]: DEBUG oslo.service.loopingcall [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1722.538552] env[61663]: DEBUG nova.compute.manager [-] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1722.538650] env[61663]: DEBUG nova.network.neutron [-] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1722.541902] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: 536c3f6e-757d-4b59-bf82-c01f735746d3] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1722.563414] env[61663]: DEBUG nova.network.neutron [-] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.565375] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "536c3f6e-757d-4b59-bf82-c01f735746d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 249.091s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.571310] env[61663]: INFO nova.compute.manager [-] [instance: 6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd] Took 0.03 seconds to deallocate network for instance. [ 1722.579462] env[61663]: DEBUG nova.compute.manager [None req-41a13fac-5bcd-4bea-bb2f-ba21b899d5fc tempest-ServerRescueTestJSON-566576593 tempest-ServerRescueTestJSON-566576593-project-member] [instance: 058b6485-898e-4799-899a-df5297144271] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1722.603010] env[61663]: DEBUG nova.compute.manager [None req-41a13fac-5bcd-4bea-bb2f-ba21b899d5fc tempest-ServerRescueTestJSON-566576593 tempest-ServerRescueTestJSON-566576593-project-member] [instance: 058b6485-898e-4799-899a-df5297144271] Instance disappeared before build. 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1722.623753] env[61663]: DEBUG oslo_concurrency.lockutils [None req-41a13fac-5bcd-4bea-bb2f-ba21b899d5fc tempest-ServerRescueTestJSON-566576593 tempest-ServerRescueTestJSON-566576593-project-member] Lock "058b6485-898e-4799-899a-df5297144271" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.774s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.633120] env[61663]: DEBUG nova.compute.manager [None req-fbdf756e-7265-49a3-8c71-b31b45a20dec tempest-ServerShowV254Test-647196749 tempest-ServerShowV254Test-647196749-project-member] [instance: f2a87111-4361-4e0a-940c-3c163c2d5e72] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1722.661012] env[61663]: DEBUG nova.compute.manager [None req-fbdf756e-7265-49a3-8c71-b31b45a20dec tempest-ServerShowV254Test-647196749 tempest-ServerShowV254Test-647196749-project-member] [instance: f2a87111-4361-4e0a-940c-3c163c2d5e72] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1722.666727] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8e3b8f34-dedb-4368-adee-342155b1ca72 tempest-ServersAdminTestJSON-1275731029 tempest-ServersAdminTestJSON-1275731029-project-member] Lock "6ca5ca5e-1f98-4d79-b22e-b07b67b5c5bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.683082] env[61663]: DEBUG oslo_concurrency.lockutils [None req-fbdf756e-7265-49a3-8c71-b31b45a20dec tempest-ServerShowV254Test-647196749 tempest-ServerShowV254Test-647196749-project-member] Lock "f2a87111-4361-4e0a-940c-3c163c2d5e72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.555s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.691795] env[61663]: DEBUG nova.compute.manager [None req-338995db-ebbb-459a-aa1b-ea3a6dd7047e tempest-ImagesOneServerNegativeTestJSON-224650719 tempest-ImagesOneServerNegativeTestJSON-224650719-project-member] [instance: 26e22311-811c-49cf-b2df-40822f2e4f3c] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1722.714827] env[61663]: DEBUG nova.compute.manager [None req-338995db-ebbb-459a-aa1b-ea3a6dd7047e tempest-ImagesOneServerNegativeTestJSON-224650719 tempest-ImagesOneServerNegativeTestJSON-224650719-project-member] [instance: 26e22311-811c-49cf-b2df-40822f2e4f3c] Instance disappeared before build. 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1722.734159] env[61663]: DEBUG oslo_concurrency.lockutils [None req-338995db-ebbb-459a-aa1b-ea3a6dd7047e tempest-ImagesOneServerNegativeTestJSON-224650719 tempest-ImagesOneServerNegativeTestJSON-224650719-project-member] Lock "26e22311-811c-49cf-b2df-40822f2e4f3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.129s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.744623] env[61663]: DEBUG nova.compute.manager [None req-43750839-8991-4f82-9666-8097bb755029 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: f71fce22-f27b-4e5d-94e4-697d09377ed1] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1722.768617] env[61663]: DEBUG nova.compute.manager [None req-43750839-8991-4f82-9666-8097bb755029 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: f71fce22-f27b-4e5d-94e4-697d09377ed1] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1722.788040] env[61663]: DEBUG oslo_concurrency.lockutils [None req-43750839-8991-4f82-9666-8097bb755029 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "f71fce22-f27b-4e5d-94e4-697d09377ed1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.525s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.797155] env[61663]: DEBUG nova.compute.manager [None req-cdd6fb62-2ec7-424d-9d2c-2c11b1adfc86 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] [instance: 06030fd6-0e35-42dc-bd66-cfc95930e90a] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1722.818839] env[61663]: DEBUG nova.compute.manager [None req-cdd6fb62-2ec7-424d-9d2c-2c11b1adfc86 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] [instance: 06030fd6-0e35-42dc-bd66-cfc95930e90a] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1722.838811] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cdd6fb62-2ec7-424d-9d2c-2c11b1adfc86 tempest-DeleteServersAdminTestJSON-1903805484 tempest-DeleteServersAdminTestJSON-1903805484-project-member] Lock "06030fd6-0e35-42dc-bd66-cfc95930e90a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.824s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.848433] env[61663]: DEBUG nova.compute.manager [None req-f6b9016b-d1cd-4ad5-921e-011dc3842892 tempest-ServersNegativeTestMultiTenantJSON-1168191994 tempest-ServersNegativeTestMultiTenantJSON-1168191994-project-member] [instance: 65cdd238-4875-4dad-9df0-0aeda65ab9ed] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1722.871933] env[61663]: DEBUG nova.compute.manager [None req-f6b9016b-d1cd-4ad5-921e-011dc3842892 tempest-ServersNegativeTestMultiTenantJSON-1168191994 tempest-ServersNegativeTestMultiTenantJSON-1168191994-project-member] [instance: 65cdd238-4875-4dad-9df0-0aeda65ab9ed] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1722.895763] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f6b9016b-d1cd-4ad5-921e-011dc3842892 tempest-ServersNegativeTestMultiTenantJSON-1168191994 tempest-ServersNegativeTestMultiTenantJSON-1168191994-project-member] Lock "65cdd238-4875-4dad-9df0-0aeda65ab9ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.512s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.906252] env[61663]: DEBUG nova.compute.manager [None req-202f74b1-1b62-4490-95f7-f625f2b42e2b tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: f3d817b9-1a93-4fb8-b25c-756de9152f17] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1722.931576] env[61663]: DEBUG nova.compute.manager [None req-202f74b1-1b62-4490-95f7-f625f2b42e2b tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: f3d817b9-1a93-4fb8-b25c-756de9152f17] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1722.951034] env[61663]: DEBUG oslo_concurrency.lockutils [None req-202f74b1-1b62-4490-95f7-f625f2b42e2b tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "f3d817b9-1a93-4fb8-b25c-756de9152f17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.313s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.958860] env[61663]: DEBUG nova.compute.manager [None req-c3f9183e-aee5-439a-8d3f-77031544ed74 tempest-InstanceActionsV221TestJSON-1618392584 tempest-InstanceActionsV221TestJSON-1618392584-project-member] [instance: 5c76183a-cdcb-49e7-95b9-75a635352479] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1722.984849] env[61663]: DEBUG nova.compute.manager [None req-c3f9183e-aee5-439a-8d3f-77031544ed74 tempest-InstanceActionsV221TestJSON-1618392584 tempest-InstanceActionsV221TestJSON-1618392584-project-member] [instance: 5c76183a-cdcb-49e7-95b9-75a635352479] Instance disappeared before build. 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1723.009242] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c3f9183e-aee5-439a-8d3f-77031544ed74 tempest-InstanceActionsV221TestJSON-1618392584 tempest-InstanceActionsV221TestJSON-1618392584-project-member] Lock "5c76183a-cdcb-49e7-95b9-75a635352479" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.290s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.017760] env[61663]: DEBUG nova.compute.manager [None req-cbc7f19f-62ce-4589-b0f4-9f499af3a6d5 tempest-ServerMetadataTestJSON-50623780 tempest-ServerMetadataTestJSON-50623780-project-member] [instance: 14682ec1-2d3f-4601-a48e-832e7f2072d4] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1723.042713] env[61663]: DEBUG nova.compute.manager [None req-cbc7f19f-62ce-4589-b0f4-9f499af3a6d5 tempest-ServerMetadataTestJSON-50623780 tempest-ServerMetadataTestJSON-50623780-project-member] [instance: 14682ec1-2d3f-4601-a48e-832e7f2072d4] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1723.062055] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cbc7f19f-62ce-4589-b0f4-9f499af3a6d5 tempest-ServerMetadataTestJSON-50623780 tempest-ServerMetadataTestJSON-50623780-project-member] Lock "14682ec1-2d3f-4601-a48e-832e7f2072d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.558s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.069836] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1723.119112] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.119370] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.120826] env[61663]: INFO nova.compute.claims [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1723.420783] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69a996c-ea13-41c3-8d14-ed0fe5ef3b5e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.428206] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c3ef08-13d1-4a12-ad37-ff1cbf0213dc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.456901] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b12057-487c-4cd6-aff7-694ce10232da {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.463720] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea44098-5dc8-4e03-911e-c473d2c41d4e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.476471] env[61663]: DEBUG nova.compute.provider_tree [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.484889] env[61663]: DEBUG nova.scheduler.client.report [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1723.499295] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.380s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.513954] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquiring lock "2770242d-6940-441c-9c3c-f67ae290b271" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.514227] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "2770242d-6940-441c-9c3c-f67ae290b271" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.520406] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "2770242d-6940-441c-9c3c-f67ae290b271" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.006s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.520883] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1723.552293] env[61663]: DEBUG nova.compute.utils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1723.553560] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1723.553734] env[61663]: DEBUG nova.network.neutron [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1723.564201] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Start building block device mappings for instance. 
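[editor's note] The inventory dict reported above for provider b47d006d-a9bd-461e-a5d9-39811f005278 is enough to compute the schedulable capacity that the claim at 1723.120826 was checked against: Placement treats usable capacity as (total - reserved) * allocation_ratio per resource class. A back-of-the-envelope check using the exact figures from the log:

```python
# Capacity check for the inventory logged above:
# usable = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {usable:g} allocatable")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 -- so the 1-vCPU / 128 MB
# m1.nano claim below succeeds trivially.
```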
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1723.647662] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1723.651366] env[61663]: DEBUG nova.policy [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '88f770ff47d74376b19f6ca040d1ed14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d7b37bcd10647ccb0972b0f62036c94', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1723.676431] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1723.676702] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1723.676850] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1723.677042] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1723.677197] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1723.677342] env[61663]: DEBUG nova.virt.hardware [None 
req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1723.677542] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1723.677701] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1723.677865] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1723.678041] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1723.678220] env[61663]: DEBUG nova.virt.hardware [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1723.679066] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a87071-f515-4d77-b499-1fb3c0891372 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.687132] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09afd00-b7a3-48d1-abab-0b3ded87163d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.063625] env[61663]: DEBUG nova.network.neutron [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Successfully created port: a8236ac0-cf65-42f8-8cdc-eaac6e69f485 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1724.973496] env[61663]: DEBUG nova.compute.manager [req-1e9dc0cb-63ec-4d1f-8905-23d57a367eea req-2a98fa72-5eaf-4059-9740-e20f0c936cf0 service nova] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Received event network-vif-plugged-a8236ac0-cf65-42f8-8cdc-eaac6e69f485 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1724.973496] env[61663]: DEBUG oslo_concurrency.lockutils [req-1e9dc0cb-63ec-4d1f-8905-23d57a367eea req-2a98fa72-5eaf-4059-9740-e20f0c936cf0 service nova] Acquiring lock 
"94f7665c-5247-4474-a9ea-700f1778af81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.973496] env[61663]: DEBUG oslo_concurrency.lockutils [req-1e9dc0cb-63ec-4d1f-8905-23d57a367eea req-2a98fa72-5eaf-4059-9740-e20f0c936cf0 service nova] Lock "94f7665c-5247-4474-a9ea-700f1778af81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.973496] env[61663]: DEBUG oslo_concurrency.lockutils [req-1e9dc0cb-63ec-4d1f-8905-23d57a367eea req-2a98fa72-5eaf-4059-9740-e20f0c936cf0 service nova] Lock "94f7665c-5247-4474-a9ea-700f1778af81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.974119] env[61663]: DEBUG nova.compute.manager [req-1e9dc0cb-63ec-4d1f-8905-23d57a367eea req-2a98fa72-5eaf-4059-9740-e20f0c936cf0 service nova] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] No waiting events found dispatching network-vif-plugged-a8236ac0-cf65-42f8-8cdc-eaac6e69f485 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1724.974119] env[61663]: WARNING nova.compute.manager [req-1e9dc0cb-63ec-4d1f-8905-23d57a367eea req-2a98fa72-5eaf-4059-9740-e20f0c936cf0 service nova] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Received unexpected event network-vif-plugged-a8236ac0-cf65-42f8-8cdc-eaac6e69f485 for instance with vm_state building and task_state spawning. [ 1724.975794] env[61663]: DEBUG nova.network.neutron [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Successfully updated port: a8236ac0-cf65-42f8-8cdc-eaac6e69f485 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1724.991204] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquiring lock "refresh_cache-94f7665c-5247-4474-a9ea-700f1778af81" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.991204] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquired lock "refresh_cache-94f7665c-5247-4474-a9ea-700f1778af81" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.992439] env[61663]: DEBUG nova.network.neutron [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1725.068936] env[61663]: DEBUG nova.network.neutron [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1725.368650] env[61663]: DEBUG nova.network.neutron [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Updating instance_info_cache with network_info: [{"id": "a8236ac0-cf65-42f8-8cdc-eaac6e69f485", "address": "fa:16:3e:cd:2a:f5", "network": {"id": "f7c936d7-c73e-4f6f-80e5-9596288f4045", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2085816332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b37bcd10647ccb0972b0f62036c94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8236ac0-cf", "ovs_interfaceid": "a8236ac0-cf65-42f8-8cdc-eaac6e69f485", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.379752] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Releasing lock "refresh_cache-94f7665c-5247-4474-a9ea-700f1778af81" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.380071] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Instance network_info: |[{"id": "a8236ac0-cf65-42f8-8cdc-eaac6e69f485", "address": "fa:16:3e:cd:2a:f5", "network": {"id": "f7c936d7-c73e-4f6f-80e5-9596288f4045", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2085816332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b37bcd10647ccb0972b0f62036c94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8236ac0-cf", "ovs_interfaceid": "a8236ac0-cf65-42f8-8cdc-eaac6e69f485", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1725.380500] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:2a:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31e77685-b4dd-4810-80ef-24115ea9ea62', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8236ac0-cf65-42f8-8cdc-eaac6e69f485', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1725.387807] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Creating folder: Project (3d7b37bcd10647ccb0972b0f62036c94). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1725.388371] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09e24d75-6dba-4155-a4b8-9731cc60d082 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.399208] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Created folder: Project (3d7b37bcd10647ccb0972b0f62036c94) in parent group-v352575. [ 1725.399405] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Creating folder: Instances. Parent ref: group-v352639. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1725.399633] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70e7f23c-f25f-429d-8c6c-4cefff739755 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.408312] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Created folder: Instances in parent group-v352639. [ 1725.408539] env[61663]: DEBUG oslo.service.loopingcall [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1725.408740] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1725.408960] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d2acb70-7135-468a-a077-f5364a6cb74c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.427468] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1725.427468] env[61663]: value = "task-1690767" [ 1725.427468] env[61663]: _type = "Task" [ 1725.427468] env[61663]: } to complete. 
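[editor's note] The "Instance VIF info" entry above is a direct reduction of the neutron network_info blob logged at 1725.368650: the bridge becomes network_name, the NSX logical-switch id becomes an OpaqueNetwork reference, and the port id becomes iface_id. An illustrative mapping over the fields actually present in the log; the helper function itself is hypothetical:

```python
# Illustrative reduction of the network_info entry above to the VIF info
# structure the VMware driver logs. Field names follow the log verbatim.
vif = {
    "id": "a8236ac0-cf65-42f8-8cdc-eaac6e69f485",
    "address": "fa:16:3e:cd:2a:f5",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62"},
}

def vif_info(vif, vif_model="vmxnet3"):
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }

print(vif_info(vif))
```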
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.434472] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690767, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.937299] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690767, 'name': CreateVM_Task, 'duration_secs': 0.327376} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.937446] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1725.938048] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.938213] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.938561] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1725.938779] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42428466-2dd6-4319-8879-6dde1b0e32ed {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.942965] env[61663]: DEBUG oslo_vmware.api [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Waiting for the task: (returnval){ [ 1725.942965] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52afa90b-37cb-f77f-9a18-4cc49581f859" [ 1725.942965] env[61663]: _type = "Task" [ 1725.942965] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.950328] env[61663]: DEBUG oslo_vmware.api [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52afa90b-37cb-f77f-9a18-4cc49581f859, 'name': SearchDatastore_Task} progress is 0%. 
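[editor's note] The "Waiting for the task" / "progress is 0%" / "completed successfully ... duration_secs" sequence above is produced by a polling loop around vCenter task objects. A simplified stand-in for that loop, assuming nothing about oslo.vmware internals; get_task_info is a hypothetical stub replacing the PropertyCollector query the real code performs:

```python
# Simplified stand-in for the wait_for_task/_poll_task loop whose progress
# lines appear above. get_task_info fakes a task going running -> success.
import time

_states = iter([{'state': 'running', 'progress': 0},
                {'state': 'success', 'result': 'task-1690767'}])

def get_task_info(task_ref):
    return next(_states)              # hypothetical stub, not a real API

def wait_for_task(task_ref, poll_interval=0.5):
    while True:
        info = get_task_info(task_ref)
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info['error'])  # real code translates VIM faults
        print(f"Task: {task_ref} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)

print(wait_for_task('task-1690767'))
```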
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.454393] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.454672] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1726.454839] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.000040] env[61663]: DEBUG nova.compute.manager [req-809fe6ed-5843-4cb2-bce0-6a71afb3ce66 req-066930ee-5ea2-43cf-980d-08902489763a service nova] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Received event network-changed-a8236ac0-cf65-42f8-8cdc-eaac6e69f485 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1727.000605] env[61663]: DEBUG nova.compute.manager [req-809fe6ed-5843-4cb2-bce0-6a71afb3ce66 req-066930ee-5ea2-43cf-980d-08902489763a service nova] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Refreshing instance network info cache due to event network-changed-a8236ac0-cf65-42f8-8cdc-eaac6e69f485. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1727.000605] env[61663]: DEBUG oslo_concurrency.lockutils [req-809fe6ed-5843-4cb2-bce0-6a71afb3ce66 req-066930ee-5ea2-43cf-980d-08902489763a service nova] Acquiring lock "refresh_cache-94f7665c-5247-4474-a9ea-700f1778af81" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.000717] env[61663]: DEBUG oslo_concurrency.lockutils [req-809fe6ed-5843-4cb2-bce0-6a71afb3ce66 req-066930ee-5ea2-43cf-980d-08902489763a service nova] Acquired lock "refresh_cache-94f7665c-5247-4474-a9ea-700f1778af81" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.000821] env[61663]: DEBUG nova.network.neutron [req-809fe6ed-5843-4cb2-bce0-6a71afb3ce66 req-066930ee-5ea2-43cf-980d-08902489763a service nova] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Refreshing network info cache for port a8236ac0-cf65-42f8-8cdc-eaac6e69f485 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1727.602522] env[61663]: DEBUG nova.network.neutron [req-809fe6ed-5843-4cb2-bce0-6a71afb3ce66 req-066930ee-5ea2-43cf-980d-08902489763a service nova] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Updated VIF entry in instance network info cache for port a8236ac0-cf65-42f8-8cdc-eaac6e69f485. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1727.602887] env[61663]: DEBUG nova.network.neutron [req-809fe6ed-5843-4cb2-bce0-6a71afb3ce66 req-066930ee-5ea2-43cf-980d-08902489763a service nova] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Updating instance_info_cache with network_info: [{"id": "a8236ac0-cf65-42f8-8cdc-eaac6e69f485", "address": "fa:16:3e:cd:2a:f5", "network": {"id": "f7c936d7-c73e-4f6f-80e5-9596288f4045", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2085816332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d7b37bcd10647ccb0972b0f62036c94", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31e77685-b4dd-4810-80ef-24115ea9ea62", "external-id": "nsx-vlan-transportzone-56", "segmentation_id": 56, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8236ac0-cf", "ovs_interfaceid": "a8236ac0-cf65-42f8-8cdc-eaac6e69f485", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.612499] env[61663]: DEBUG oslo_concurrency.lockutils [req-809fe6ed-5843-4cb2-bce0-6a71afb3ce66 req-066930ee-5ea2-43cf-980d-08902489763a service nova] Releasing lock "refresh_cache-94f7665c-5247-4474-a9ea-700f1778af81" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.032381] env[61663]: DEBUG oslo_concurrency.lockutils [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquiring lock "94f7665c-5247-4474-a9ea-700f1778af81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.647442] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "440175fc-da0c-4ea3-9a74-46e97e32658b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.647850] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "440175fc-da0c-4ea3-9a74-46e97e32658b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.524970] env[61663]: WARNING oslo_vmware.rw_handles [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Error occurred 
while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1766.524970] env[61663]: ERROR oslo_vmware.rw_handles [ 1766.525590] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/894eab83-72be-4f0f-89d3-9a4508245830/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1766.527755] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1766.528024] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Copying Virtual Disk [datastore1] vmware_temp/894eab83-72be-4f0f-89d3-9a4508245830/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/894eab83-72be-4f0f-89d3-9a4508245830/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1766.528308] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f056d0c-21cf-47f4-a5b0-75298d126de2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.537457] env[61663]: DEBUG oslo_vmware.api [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Waiting for the task: (returnval){ [ 1766.537457] env[61663]: value = "task-1690768" [ 1766.537457] env[61663]: _type = "Task" [ 1766.537457] env[61663]: } to complete. 
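[editor's note] The WARNING and traceback above come from the ESX host closing the image-transfer connection before sending any HTTP response; http.client raises RemoteDisconnected whenever the status line never arrives. A minimal standard-library reproduction of exactly that condition (the throwaway local server is the only assumption):

```python
# Minimal reproduction of http.client.RemoteDisconnected: the server
# accepts the TCP connection, reads the request, and closes it without
# writing a response, which is the condition rw_handles reported above.
import http.client
import socket
import threading

srv = socket.socket()
srv.bind(('127.0.0.1', 0))
srv.listen(1)

def close_without_response():
    conn, _ = srv.accept()
    conn.recv(1024)                   # consume the request, then hang up
    conn.close()                      # no status line is ever sent

threading.Thread(target=close_without_response).start()

client = http.client.HTTPConnection('127.0.0.1', srv.getsockname()[1])
client.request('GET', '/')
try:
    client.getresponse()
except http.client.RemoteDisconnected as exc:
    print('caught:', exc)             # Remote end closed connection without response
```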
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.545596] env[61663]: DEBUG oslo_vmware.api [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Task: {'id': task-1690768, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.047487] env[61663]: DEBUG oslo_vmware.exceptions [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1767.047762] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.048328] env[61663]: ERROR nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1767.048328] env[61663]: Faults: ['InvalidArgument'] [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Traceback (most recent call last): [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] yield resources [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] self.driver.spawn(context, instance, image_meta, [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] self._fetch_image_if_missing(context, vi) [ 1767.048328] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] image_cache(vi, tmp_image_ds_loc) [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 
6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] vm_util.copy_virtual_disk( [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] session._wait_for_task(vmdk_copy_task) [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] return self.wait_for_task(task_ref) [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] return evt.wait() [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] result = hub.switch() [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1767.048696] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] return self.greenlet.switch() [ 1767.049097] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1767.049097] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] self.f(*self.args, **self.kw) [ 1767.049097] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1767.049097] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] raise exceptions.translate_fault(task_info.error) [ 1767.049097] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1767.049097] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Faults: ['InvalidArgument'] [ 1767.049097] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] [ 1767.049097] env[61663]: INFO nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Terminating instance [ 1767.050278] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.050478] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1767.050708] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15190105-ad04-4e74-a329-22407c4fb14d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.053040] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1767.053239] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1767.053928] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5d6b0e-8124-4e7f-ab23-5616d0f472d1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.060552] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1767.060818] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df31b09d-458a-4b05-a0a7-0dd82e0a4d1f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.062855] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1767.063048] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1767.064049] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81d58853-6ef2-41c2-973c-1ab95147fd37 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.070000] env[61663]: DEBUG oslo_vmware.api [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 1767.070000] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d411f1-cbd1-d89b-3e26-58f126402898" [ 1767.070000] env[61663]: _type = "Task" [ 1767.070000] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.083710] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1767.083925] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating directory with path [datastore1] vmware_temp/ee22dc4c-fee8-4256-9103-a45e5c864c27/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1767.084135] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bffe728a-8748-40e2-82d4-b5516a62600d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.104330] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created directory with path [datastore1] vmware_temp/ee22dc4c-fee8-4256-9103-a45e5c864c27/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1767.104517] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Fetch image to [datastore1] vmware_temp/ee22dc4c-fee8-4256-9103-a45e5c864c27/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1767.104693] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/ee22dc4c-fee8-4256-9103-a45e5c864c27/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1767.105410] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0b9a2ef6-a821-4f28-b2cd-ffb365181733 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.111742] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70a1483-014b-4c31-81d4-606d09f0e1a7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.120305] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e478ef-a37a-44cf-b3e3-f818216caae5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.150904] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d115fe1c-8858-41cc-a676-125f8d06e52a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.153299] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1767.153535] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1767.153710] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Deleting the datastore file [datastore1] 6f7a3a1f-859d-42f5-b986-6a1a038ca536 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1767.153932] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79c3dce5-2a60-478f-a1db-ec369e9be54f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.158579] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eaec751b-d4b6-4a83-8ac0-6fb1139d7621 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.161195] env[61663]: DEBUG oslo_vmware.api [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Waiting for the task: (returnval){ [ 1767.161195] env[61663]: value = "task-1690770" [ 1767.161195] env[61663]: _type = "Task" [ 1767.161195] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.168200] env[61663]: DEBUG oslo_vmware.api [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Task: {'id': task-1690770, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.181474] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1767.233932] env[61663]: DEBUG oslo_vmware.rw_handles [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee22dc4c-fee8-4256-9103-a45e5c864c27/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1767.295083] env[61663]: DEBUG oslo_vmware.rw_handles [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1767.295291] env[61663]: DEBUG oslo_vmware.rw_handles [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee22dc4c-fee8-4256-9103-a45e5c864c27/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1767.673333] env[61663]: DEBUG oslo_vmware.api [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Task: {'id': task-1690770, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064338} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.673638] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1767.673822] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1767.674013] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1767.674201] env[61663]: INFO nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1767.676625] env[61663]: DEBUG nova.compute.claims [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1767.676813] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.677052] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.980485] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6e45be-d0ee-4a16-9e7e-d6496d4af6e4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.988454] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdade99-2b37-4d11-b73d-e9eeb6b58a7f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.017843] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc7d255-7954-49ba-8ccf-15dba32221cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.024923] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
[ 1768.037583] env[61663]: DEBUG nova.compute.provider_tree [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1768.046494] env[61663]: DEBUG nova.scheduler.client.report [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1768.060141] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.383s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1768.060686] env[61663]: ERROR nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1768.060686] env[61663]: Faults: ['InvalidArgument']
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Traceback (most recent call last):
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] self.driver.spawn(context, instance, image_meta,
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] self._fetch_image_if_missing(context, vi)
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] image_cache(vi, tmp_image_ds_loc)
[ 1768.060686] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] vm_util.copy_virtual_disk(
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] session._wait_for_task(vmdk_copy_task)
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] return self.wait_for_task(task_ref)
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] return evt.wait()
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] result = hub.switch()
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] return self.greenlet.switch()
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1768.060995] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] self.f(*self.args, **self.kw)
[ 1768.061311] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1768.061311] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] raise exceptions.translate_fault(task_info.error)
[ 1768.061311] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1768.061311] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Faults: ['InvalidArgument']
[ 1768.061311] env[61663]: ERROR nova.compute.manager [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536]
[ 1768.061435] env[61663]: DEBUG nova.compute.utils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1768.063228] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Build of instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 was re-scheduled: A specified parameter was not correct: fileType
[ 1768.063228] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1768.063615] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1768.063786] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1768.063958] env[61663]: DEBUG nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1768.064141] env[61663]: DEBUG nova.network.neutron [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1768.626177] env[61663]: DEBUG nova.network.neutron [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1768.639624] env[61663]: INFO nova.compute.manager [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Took 0.57 seconds to deallocate network for instance.
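Editor's note: the traceback above shows exactly where the failure surfaces: `_poll_task` in oslo_vmware/api.py reads the task's error state and raises `exceptions.translate_fault(task_info.error)`, which the compute manager catches and turns into a reschedule. A condensed sketch of that poll-and-translate loop; the state names follow the vSphere TaskInfo model, and `get_task_info` is a stand-in for the PropertyCollector read, not the oslo.vmware internals:

```python
import time

class VimFaultException(Exception):
    """Carries the fault list alongside the message, as in the log."""
    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list

def wait_for_task(get_task_info, interval=0.5):
    """Poll a vSphere task until it leaves the queued/running states."""
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # e.g. msg="A specified parameter was not correct: fileType",
            # faults=['InvalidArgument'] in the traceback above
            raise VimFaultException(info["faults"], info["message"])
        time.sleep(interval)  # still queued or running; poll again
```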
[ 1768.754291] env[61663]: INFO nova.scheduler.client.report [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Deleted allocations for instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536
[ 1768.781850] env[61663]: DEBUG oslo_concurrency.lockutils [None req-06a9d950-b6b2-4dfa-9d53-501be7142690 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 514.763s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1768.783012] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 316.466s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1768.783269] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Acquiring lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1768.783493] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1768.783689] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1768.785982] env[61663]: INFO nova.compute.manager [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Terminating instance
[ 1768.787929] env[61663]: DEBUG nova.compute.manager [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1768.788319] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1768.788758] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-55e66581-7c48-49a1-bb6b-1859bd9f9bdf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1768.800836] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ca08ff-5988-44a6-9356-abeef50dc419 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1768.811202] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1768.835890] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f7a3a1f-859d-42f5-b986-6a1a038ca536 could not be found.
[ 1768.836176] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1768.836369] env[61663]: INFO nova.compute.manager [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Took 0.05 seconds to destroy the instance on the hypervisor.
[ 1768.836680] env[61663]: DEBUG oslo.service.loopingcall [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1768.836962] env[61663]: DEBUG nova.compute.manager [-] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1768.837133] env[61663]: DEBUG nova.network.neutron [-] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1768.865056] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1768.865056] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1768.866647] env[61663]: INFO nova.compute.claims [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1768.870265] env[61663]: DEBUG nova.network.neutron [-] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1768.878584] env[61663]: INFO nova.compute.manager [-] [instance: 6f7a3a1f-859d-42f5-b986-6a1a038ca536] Took 0.04 seconds to deallocate network for instance.
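Editor's note: the destroy path here is idempotent. The earlier failed build already removed the backing VM, so when terminate runs, the driver raises `InstanceNotFound`, the WARNING above is logged, and the instance is still reported destroyed. A small sketch of that tolerate-missing pattern; the exception class and `driver` interface are simplified stand-ins, not Nova's actual signatures:

```python
import logging

log = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    pass

def destroy_instance(driver, instance_uuid):
    """Destroy on the hypervisor, treating 'already gone' as success."""
    try:
        driver.destroy(instance_uuid)
    except InstanceNotFound:
        # Mirrors the WARNING above: the backend VM vanished after the
        # failed build, so there is nothing left to delete.
        log.warning("Instance does not exist on backend: %s", instance_uuid)
    log.info("Instance destroyed")
```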
[ 1768.995812] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9bfc4456-87c3-4391-9b49-2d943f9a41b1 tempest-InstanceActionsTestJSON-1946227332 tempest-InstanceActionsTestJSON-1946227332-project-member] Lock "6f7a3a1f-859d-42f5-b986-6a1a038ca536" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1769.211409] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb2bca8-1dd2-4b75-9498-8392d03c4d10 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1769.218888] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e121d8ea-fa35-49cc-82d7-3d0cb19d04bf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1769.249149] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33233a3-10db-4778-9b5c-3be8bbaa32c4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1769.256142] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3ca244-d1e2-4b2f-8a10-dfde89a00de5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1769.268612] env[61663]: DEBUG nova.compute.provider_tree [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1769.277525] env[61663]: DEBUG nova.scheduler.client.report [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1769.290788] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.426s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1769.291269] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1769.330739] env[61663]: DEBUG nova.compute.utils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1769.331959] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1769.332490] env[61663]: DEBUG nova.network.neutron [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1769.341042] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1769.393013] env[61663]: DEBUG nova.policy [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '775277db1abd4a7687d1fec15a6ec33e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c1d0303b93a4926aa5078bddfceca6f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1769.406387] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
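Editor's note: the inventory dump above is enough to recompute the capacity the scheduler sees. Placement exposes `(total - reserved) * allocation_ratio` units per resource class, so the logged values work out as below (a quick check using the standard placement formula, with only the fields that matter kept):

```python
# Inventory as reported for provider b47d006d-a9bd-461e-a5d9-39811f005278
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(rc, capacity)
# VCPU 192.0        -> 48 physical cores oversubscribed 4x
# MEMORY_MB 196078.0
# DISK_GB 400.0
```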
[ 1769.434773] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=<?>,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:56:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1769.435033] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1769.435197] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1769.435386] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1769.435582] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1769.435743] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1769.435954] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1769.436137] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1769.436309] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1769.436475] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1769.436686] env[61663]: DEBUG nova.virt.hardware [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1769.437555] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f48444-e9d4-4e13-9574-05bf759fa24f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1769.445392] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14388017-f811-4f83-af01-f4a70f4a7af5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1769.800907] env[61663]: DEBUG nova.network.neutron [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Successfully created port: 867f654d-cca3-4029-867d-82769378c52b {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1770.691808] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1770.701047] env[61663]: DEBUG nova.network.neutron [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Successfully updated port: 867f654d-cca3-4029-867d-82769378c52b {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1770.711760] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquiring lock "refresh_cache-768bef02-a114-4cac-a614-6e8a04ce0d18" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1770.711941] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquired lock "refresh_cache-768bef02-a114-4cac-a614-6e8a04ce0d18" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1770.712072] env[61663]: DEBUG nova.network.neutron [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
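Editor's note: the topology records above walk the standard derivation: with no flavor or image limits set, the maximum collapses to 65536 per dimension, and for a 1-vCPU flavor the only factorization of sockets x cores x threads is 1:1:1. A toy enumeration showing why exactly one topology survives (illustrative only, not Nova's actual implementation):

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)] -- matches the log
```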
[ 1770.751610] env[61663]: DEBUG nova.network.neutron [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1771.037572] env[61663]: DEBUG nova.network.neutron [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Updating instance_info_cache with network_info: [{"id": "867f654d-cca3-4029-867d-82769378c52b", "address": "fa:16:3e:2d:ff:4d", "network": {"id": "c72d51a4-7f8a-41d6-a13f-27b6ed768230", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-770377509-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c1d0303b93a4926aa5078bddfceca6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap867f654d-cc", "ovs_interfaceid": "867f654d-cca3-4029-867d-82769378c52b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1771.051392] env[61663]: DEBUG nova.compute.manager [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Received event network-vif-plugged-867f654d-cca3-4029-867d-82769378c52b {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1771.051682] env[61663]: DEBUG oslo_concurrency.lockutils [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] Acquiring lock "768bef02-a114-4cac-a614-6e8a04ce0d18-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1771.051937] env[61663]: DEBUG oslo_concurrency.lockutils [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1771.052171] env[61663]: DEBUG oslo_concurrency.lockutils [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1771.052398] env[61663]: DEBUG nova.compute.manager [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] No waiting events found dispatching network-vif-plugged-867f654d-cca3-4029-867d-82769378c52b {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1771.052573] env[61663]: WARNING nova.compute.manager [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Received unexpected event network-vif-plugged-867f654d-cca3-4029-867d-82769378c52b for instance with vm_state building and task_state spawning.
[ 1771.052774] env[61663]: DEBUG nova.compute.manager [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Received event network-changed-867f654d-cca3-4029-867d-82769378c52b {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1771.052979] env[61663]: DEBUG nova.compute.manager [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Refreshing instance network info cache due to event network-changed-867f654d-cca3-4029-867d-82769378c52b. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 1771.053218] env[61663]: DEBUG oslo_concurrency.lockutils [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] Acquiring lock "refresh_cache-768bef02-a114-4cac-a614-6e8a04ce0d18" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1771.054325] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Releasing lock "refresh_cache-768bef02-a114-4cac-a614-6e8a04ce0d18" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1771.054630] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Instance network_info: |[{"id": "867f654d-cca3-4029-867d-82769378c52b", "address": "fa:16:3e:2d:ff:4d", "network": {"id": "c72d51a4-7f8a-41d6-a13f-27b6ed768230", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-770377509-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c1d0303b93a4926aa5078bddfceca6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap867f654d-cc", "ovs_interfaceid": "867f654d-cca3-4029-867d-82769378c52b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1771.054922] env[61663]: DEBUG oslo_concurrency.lockutils [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] Acquired lock "refresh_cache-768bef02-a114-4cac-a614-6e8a04ce0d18" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1771.055157] env[61663]: DEBUG nova.network.neutron [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Refreshing network info cache for port 867f654d-cca3-4029-867d-82769378c52b {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1771.057454] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:ff:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dd96b39f-bd2e-48d1-85c3-577cf97f08c8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '867f654d-cca3-4029-867d-82769378c52b', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1771.065030] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Creating folder: Project (8c1d0303b93a4926aa5078bddfceca6f). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1771.066709] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d76f9f36-34d8-4be1-af13-6080dd9fd48e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1771.088726] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Created folder: Project (8c1d0303b93a4926aa5078bddfceca6f) in parent group-v352575.
[ 1771.088924] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Creating folder: Instances. Parent ref: group-v352642. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1771.089173] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93b3def6-9afb-447d-bfaf-db9a25c2b162 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1771.099329] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Created folder: Instances in parent group-v352642.
[ 1771.099559] env[61663]: DEBUG oslo.service.loopingcall [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1771.099741] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1771.100452] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20b2b6d2-0ed4-4187-acf6-a3b76ef26f7f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1771.122462] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1771.122462] env[61663]: value = "task-1690773"
[ 1771.122462] env[61663]: _type = "Task"
[ 1771.122462] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1771.130226] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690773, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1771.632308] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690773, 'name': CreateVM_Task, 'duration_secs': 0.30359} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1771.632514] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1771.633518] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1771.633691] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1771.634017] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1771.634268] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c784e88-4688-430f-ba0c-b178d3968737 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1771.638651] env[61663]: DEBUG oslo_vmware.api [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Waiting for the task: (returnval){
[ 1771.638651] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52728475-4e1d-4501-ae54-9765505718fd"
[ 1771.638651] env[61663]: _type = "Task"
[ 1771.638651] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1771.646238] env[61663]: DEBUG oslo_vmware.api [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52728475-4e1d-4501-ae54-9765505718fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1771.691897] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1771.692071] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 1771.692202] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 1771.714513] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1771.714694] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1771.714815] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 04488672-86c4-415b-961e-94641d570112] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1771.714954] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1771.715202] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1771.715353] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
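Editor's note: the `[datastore1] devstack-image-cache_base/...` lock acquired above serializes access to the per-image cache entry so concurrent spawns of the same image do not race the fetch. A minimal sketch of that serialize-by-image-name pattern, assuming oslo.concurrency is installed; the lock name format copies the log, while `critical_section` is a hypothetical placeholder for the cache work:

```python
from oslo_concurrency import lockutils

def with_image_cache_lock(datastore, image_id, critical_section):
    """Serialize work on one cached image, as the log's lock names do."""
    lock_name = f"[{datastore}] devstack-image-cache_base/{image_id}"
    with lockutils.lock(lock_name):
        # Only one greenthread per image cache entry gets past this
        # point at a time; others block, like the waits in the log.
        return critical_section()
```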
[ 1771.715479] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1771.715654] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1771.715812] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1771.715938] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1771.716072] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 1771.730737] env[61663]: DEBUG nova.network.neutron [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Updated VIF entry in instance network info cache for port 867f654d-cca3-4029-867d-82769378c52b. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1771.731112] env[61663]: DEBUG nova.network.neutron [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Updating instance_info_cache with network_info: [{"id": "867f654d-cca3-4029-867d-82769378c52b", "address": "fa:16:3e:2d:ff:4d", "network": {"id": "c72d51a4-7f8a-41d6-a13f-27b6ed768230", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-770377509-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c1d0303b93a4926aa5078bddfceca6f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dd96b39f-bd2e-48d1-85c3-577cf97f08c8", "external-id": "cl2-zone-84", "segmentation_id": 84, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap867f654d-cc", "ovs_interfaceid": "867f654d-cca3-4029-867d-82769378c52b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1771.740276] env[61663]: DEBUG oslo_concurrency.lockutils [req-8a690d00-0c92-4dd6-8458-bcdbc649a780 req-0e79bdb8-7b48-4b11-9711-d490dfc76909 service nova] Releasing lock "refresh_cache-768bef02-a114-4cac-a614-6e8a04ce0d18" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1772.149619] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1772.150031] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1772.150102] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1772.692677] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1772.692908] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}}
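Editor's note: "Processing image ..." marks the start of `_fetch_image_if_missing`: under the per-image lock, look for the cached VMDK on the datastore and only download when the search comes up empty, which is what the SearchDatastore_Task earlier was for. The shape of that check-then-fetch step, sketched with hypothetical placeholder helpers rather than Nova's real ones:

```python
def fetch_image_if_missing(datastore_search, fetch_image, cached_vmdk_path):
    """Download an image into the datastore cache only when absent (sketch).

    `datastore_search` stands in for SearchDatastore_Task and
    `fetch_image` for the HTTP transfer shown earlier; both are
    placeholders for illustration, not Nova's actual helpers.
    """
    if datastore_search(cached_vmdk_path):
        return cached_vmdk_path  # cache hit: reuse the cached VMDK
    fetch_image(cached_vmdk_path)  # cache miss: stream it from Glance
    return cached_vmdk_path
```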
[ 1772.702454] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] There are 0 instances to clean {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}}
[ 1773.692579] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1773.692866] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1773.693008] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances with incomplete migration {{(pid=61663) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}}
[ 1774.698170] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1775.695631] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1775.706795] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1775.707099] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1775.707217] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1775.707375] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1775.709731] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c7de24-d581-40fd-96ad-23cc70ffb0e3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.718835] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835cb795-437a-457b-ac05-7e443b5abb45 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.732910] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfc28e1-8bbd-4091-aba6-03ccae9dd191 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.740642] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58913cd-7ab8-48ab-aecf-07222aa7661d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1775.773020] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181270MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1775.773020] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1775.773473] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1775.962291] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.962469] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.962601] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.962727] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.962848] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.962967] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.963096] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.963213] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.963325] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.963438] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1775.977055] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1775.991165] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 76a61ec9-99cb-4371-9e7e-dc206c0a9d3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.006168] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance a63f3aaf-9a32-4782-94db-bfbbbd094530 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.021279] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 4cc0a356-4bc7-4713-87af-5c5c7cc792d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.033811] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5ee3346f-50bf-464e-a4dd-afd1edd0052a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.045618] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b583b039-84c7-4168-91a1-82821c0001a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.056834] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.067979] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance a0399e6e-6b1a-4702-870d-d9644c3d6545 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.078785] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6d0f9509-1e63-4da8-a92b-9393a7cb4dff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.090686] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b1eece3b-003c-46ea-944d-ccac01ca4ba9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.101021] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 56dc70b4-ebff-42c5-bbdc-bf7ca4a7c73d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.112252] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 440175fc-da0c-4ea3-9a74-46e97e32658b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1776.113026] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1776.113026] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1776.128609] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing inventories for resource provider b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1776.142843] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating ProviderTree inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1776.143097] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1776.158055] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing aggregate associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, aggregates: None {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1776.176441] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing trait associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1776.235920] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquiring lock "768bef02-a114-4cac-a614-6e8a04ce0d18" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.484911] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c91229-b3fb-4d8a-88d3-ed71df42e213 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.494880] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cafcd30-a7a3-46e7-8368-49429f1db5c7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.524264] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4acdc615-146a-4da0-bbf9-f264e0a17e52 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.531993] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4a1656-c3c7-4461-9cb1-0b5a07021430 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.544909] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1776.554107] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1776.571729] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1776.571990] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.799s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.569034] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.861664] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "668c457f-7ebc-441f-8ece-cc63c571363b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.862106] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "668c457f-7ebc-441f-8ece-cc63c571363b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.294488] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aaaf2e48-9df6-427b-99b5-5e2a1fdadadd tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "d62bc172-a64b-481e-a2fa-55ad4ccf73f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.294624] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aaaf2e48-9df6-427b-99b5-5e2a1fdadadd tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "d62bc172-a64b-481e-a2fa-55ad4ccf73f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.692241] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1778.722427] env[61663]: DEBUG oslo_concurrency.lockutils [None req-fad726d7-dba3-4763-938c-f373b232209b tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "3444cec9-7da9-47d9-b669-cd1b4261e9d1" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.722657] env[61663]: DEBUG oslo_concurrency.lockutils [None req-fad726d7-dba3-4763-938c-f373b232209b tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "3444cec9-7da9-47d9-b669-cd1b4261e9d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1779.692439] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.692732] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1781.692775] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1782.692462] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1784.695579] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.043539] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_power_states {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.065187] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Getting list of instances from cluster (obj){ [ 1789.065187] env[61663]: value = "domain-c8" [ 1789.065187] env[61663]: _type = "ClusterComputeResource" [ 1789.065187] env[61663]: } {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1789.066520] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e6e21a-a5fa-451f-a7f4-d318b7b9c609 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.083372] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Got total of 10 instances {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1789.084083] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 {{(pid=61663) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.084083] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid ee0e3e54-c135-489f-87ca-f441efebcbd5 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.084083] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 04488672-86c4-415b-961e-94641d570112 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.084083] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.084297] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 1305216b-0ee5-499a-a82a-30b45a8c832c {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.084602] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.084602] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.084689] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid ef8528db-1338-4af6-9d4a-5eda7fe69a98 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.084814] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 94f7665c-5247-4474-a9ea-700f1778af81 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.084952] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 768bef02-a114-4cac-a614-6e8a04ce0d18 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1789.085297] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.086540] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "ee0e3e54-c135-489f-87ca-f441efebcbd5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.086540] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "04488672-86c4-415b-961e-94641d570112" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.086540] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.086540] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "1305216b-0ee5-499a-a82a-30b45a8c832c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.086674] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.086674] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.086732] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.086979] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "94f7665c-5247-4474-a9ea-700f1778af81" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.087136] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "768bef02-a114-4cac-a614-6e8a04ce0d18" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.645891] env[61663]: WARNING oslo_vmware.rw_handles [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 318, in begin [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1815.645891] env[61663]: ERROR oslo_vmware.rw_handles [ 1815.646575] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/ee22dc4c-fee8-4256-9103-a45e5c864c27/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1815.648186] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1815.648439] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Copying Virtual Disk [datastore1] vmware_temp/ee22dc4c-fee8-4256-9103-a45e5c864c27/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/ee22dc4c-fee8-4256-9103-a45e5c864c27/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1815.648721] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32f66eb4-f390-4413-bc49-e1ad1d0f3e38 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.656656] env[61663]: DEBUG oslo_vmware.api [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 1815.656656] env[61663]: value = "task-1690774" [ 1815.656656] env[61663]: _type = "Task" [ 1815.656656] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.665552] env[61663]: DEBUG oslo_vmware.api [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690774, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.167339] env[61663]: DEBUG oslo_vmware.exceptions [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1816.167659] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.168272] env[61663]: ERROR nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1816.168272] env[61663]: Faults: ['InvalidArgument'] [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Traceback (most recent call last): [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] yield resources [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] self.driver.spawn(context, instance, image_meta, [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] self._fetch_image_if_missing(context, vi) [ 1816.168272] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] image_cache(vi, tmp_image_ds_loc) [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] vm_util.copy_virtual_disk( [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] session._wait_for_task(vmdk_copy_task) [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] return self.wait_for_task(task_ref) [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] return evt.wait() [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] result = hub.switch() [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1816.168872] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] return self.greenlet.switch() [ 1816.169317] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1816.169317] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] self.f(*self.args, **self.kw) [ 1816.169317] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1816.169317] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] raise exceptions.translate_fault(task_info.error) [ 1816.169317] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1816.169317] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Faults: ['InvalidArgument'] [ 1816.169317] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] [ 1816.169317] env[61663]: INFO nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Terminating instance [ 1816.170648] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.171139] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1816.172030] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 
tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1816.172030] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1816.172030] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c78f3bc-9fab-4749-b505-35ecd30cfae8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.174449] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92579c3-bfcb-4b28-b9a3-39b727e1ca9e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.181390] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1816.181613] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2022f967-03d4-465e-a5b0-07109d0965b6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.183870] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1816.184054] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1816.184997] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7f1ef54-23a5-4b3d-a928-e03a35790f68 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.189690] env[61663]: DEBUG oslo_vmware.api [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Waiting for the task: (returnval){ [ 1816.189690] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5244bdc5-f514-3bfe-9c7c-dba263dd328a" [ 1816.189690] env[61663]: _type = "Task" [ 1816.189690] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.219336] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1816.219608] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Creating directory with path [datastore1] vmware_temp/85b5a88c-82a1-4ea8-87df-95fb7e4d9afa/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1816.219846] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-135a989c-f326-48da-9516-03961ae1641b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.239826] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Created directory with path [datastore1] vmware_temp/85b5a88c-82a1-4ea8-87df-95fb7e4d9afa/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1816.240015] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Fetch image to [datastore1] vmware_temp/85b5a88c-82a1-4ea8-87df-95fb7e4d9afa/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1816.240192] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/85b5a88c-82a1-4ea8-87df-95fb7e4d9afa/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1816.240932] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfe1c26-7f07-4336-9a6d-1e8cc4491ec1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.249016] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f085d6-c108-4162-b587-60deac89bf19 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.257918] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aafd5c87-cc6f-45f6-bfd2-9289fc7084cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.288669] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8304b884-7e67-4c2a-9602-4333549b8eee {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.291014] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1816.291223] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1816.291398] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleting the datastore file [datastore1] 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1816.291611] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-efb56ca2-7acc-4892-857e-eb54e5799d91 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.296622] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-77f37fd4-6fd1-4c8d-8ae9-ed2756a06959 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.299291] env[61663]: DEBUG oslo_vmware.api [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 1816.299291] env[61663]: value = "task-1690776" [ 1816.299291] env[61663]: _type = "Task" [ 1816.299291] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.306908] env[61663]: DEBUG oslo_vmware.api [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690776, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.316805] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1816.370369] env[61663]: DEBUG oslo_vmware.rw_handles [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/85b5a88c-82a1-4ea8-87df-95fb7e4d9afa/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1816.433524] env[61663]: DEBUG oslo_vmware.rw_handles [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1816.433727] env[61663]: DEBUG oslo_vmware.rw_handles [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/85b5a88c-82a1-4ea8-87df-95fb7e4d9afa/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1816.809426] env[61663]: DEBUG oslo_vmware.api [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069658} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.809823] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1816.810025] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1816.810108] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1816.810253] env[61663]: INFO nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1816.812504] env[61663]: DEBUG nova.compute.claims [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1816.812683] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.812895] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.224906] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ad285e-585d-4d1f-a07f-eb7554c6a9e5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.233037] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91892f8c-856a-426d-9e58-b6e694cbf194 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.263533] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08aefe4-f712-402a-a9df-8dbf2f13fdf1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.270987] env[61663]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fee89ae-f887-4bf6-82dd-9fb2742af5b9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.283952] env[61663]: DEBUG nova.compute.provider_tree [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1817.292693] env[61663]: DEBUG nova.scheduler.client.report [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1817.307696] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.495s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.308282] env[61663]: ERROR nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1817.308282] env[61663]: Faults: ['InvalidArgument'] [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Traceback (most recent call last): [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] self.driver.spawn(context, instance, image_meta, [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] self._fetch_image_if_missing(context, vi) [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] image_cache(vi, tmp_image_ds_loc) [ 1817.308282] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] vm_util.copy_virtual_disk( [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] session._wait_for_task(vmdk_copy_task) [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] return self.wait_for_task(task_ref) [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] return evt.wait() [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] result = hub.switch() [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] return self.greenlet.switch() [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1817.308840] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] self.f(*self.args, **self.kw) [ 1817.309486] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1817.309486] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] raise exceptions.translate_fault(task_info.error) [ 1817.309486] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1817.309486] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Faults: ['InvalidArgument'] [ 1817.309486] env[61663]: ERROR nova.compute.manager [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] [ 1817.309486] env[61663]: DEBUG nova.compute.utils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] VimFaultException {{(pid=61663) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1817.310452] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Build of instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 was re-scheduled: A specified parameter was not correct: fileType [ 1817.310452] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1817.310838] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1817.311028] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1817.311261] env[61663]: DEBUG nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1817.311411] env[61663]: DEBUG nova.network.neutron [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1817.620170] env[61663]: DEBUG nova.network.neutron [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.632666] env[61663]: INFO nova.compute.manager [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Took 0.32 seconds to deallocate network for instance.
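
Annotation: the traceback above shows oslo.vmware turning the failed CopyVirtualDisk_Task into a VimFaultException via exceptions.translate_fault(task_info.error), with the fault name carried in fault_list. A minimal sketch of distinguishing this non-retryable 'InvalidArgument' fault from transient ones, assuming only the public oslo.vmware session API; the RETRYABLE set and the wait_checked helper are illustrative, not taken from this log:

    from oslo_vmware import exceptions as vexc

    # Assumption: treat only this fault as transient; everything else is final.
    RETRYABLE = {'TaskInProgress'}

    def wait_checked(session, task_ref):
        """Wait for a VMware task, retrying once on a transient fault."""
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as e:
            faults = set(e.fault_list or [])
            if faults & RETRYABLE:
                return session.wait_for_task(task_ref)  # single retry
            # e.g. Faults: ['InvalidArgument'] on fileType above: a bad
            # disk spec is a caller error, so surface it; the compute
            # manager then re-schedules the build, as seen in this log.
            raise
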
[ 1817.726215] env[61663]: INFO nova.scheduler.client.report [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleted allocations for instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 [ 1817.750839] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c4e5a2b-808f-4b39-8673-233e70337404 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 561.597s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.752014] env[61663]: DEBUG oslo_concurrency.lockutils [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 363.385s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.752248] env[61663]: DEBUG oslo_concurrency.lockutils [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.752465] env[61663]: DEBUG oslo_concurrency.lockutils [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.752633] env[61663]: DEBUG oslo_concurrency.lockutils [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.754595] env[61663]: INFO nova.compute.manager [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Terminating instance [ 1817.756316] env[61663]: DEBUG nova.compute.manager [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1817.756524] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1817.757035] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a4781e2-9967-4f10-8c75-dae38b9f5a57 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.766037] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc89dc8-e162-4433-9fdf-a7b1cc3d75c3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.776442] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1817.796467] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6 could not be found. [ 1817.796670] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1817.796847] env[61663]: INFO nova.compute.manager [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1817.797196] env[61663]: DEBUG oslo.service.loopingcall [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1817.797367] env[61663]: DEBUG nova.compute.manager [-] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1817.797466] env[61663]: DEBUG nova.network.neutron [-] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1817.820293] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.820552] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.821931] env[61663]: INFO nova.compute.claims [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1817.824860] env[61663]: DEBUG nova.network.neutron [-] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.833660] env[61663]: INFO nova.compute.manager [-] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] Took 0.04 seconds to deallocate network for instance. [ 1817.943783] env[61663]: DEBUG oslo_concurrency.lockutils [None req-086b304c-3834-4a51-bc5e-e4063ef40d29 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.944864] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 28.860s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.945082] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 98eb6b3f-69c8-4837-9b5c-a1485fe5cab6] During sync_power_state the instance has a pending task (deleting). Skip. 
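
Annotation: the Acquiring/acquired/released triplets throughout these records come from oslo.concurrency's lockutils, which logs wait and hold durations around each named lock. A minimal sketch of the same pattern; only the lock name "compute_resources" is taken from the log, and the decorated function is hypothetical:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_claims():
        # Runs with the named in-process lock held; with DEBUG logging
        # enabled, lockutils emits the "acquired ... waited" and
        # "released ... held" lines seen throughout this log.
        pass

    update_claims()
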
[ 1817.945294] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "98eb6b3f-69c8-4837-9b5c-a1485fe5cab6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.187178] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c987b37-4ca0-4bf1-acd2-6b643e92fd69 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.194220] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c88560d-fb79-4829-978e-d7b60b923f40 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.223931] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72524bd2-8b2f-4661-b10b-1b404bb0d1dd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.231029] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8845cb56-3389-4a41-9eb7-8803cc75f830 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.246561] env[61663]: DEBUG nova.compute.provider_tree [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1818.257538] env[61663]: DEBUG nova.scheduler.client.report [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1818.273506] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.452s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.273506] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1818.315241] env[61663]: DEBUG nova.compute.utils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1818.316956] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1818.317158] env[61663]: DEBUG nova.network.neutron [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1818.326181] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1818.391728] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1818.419647] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1818.419911] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1818.420084] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1818.420279] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1818.420428] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1818.420575] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1818.420782] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1818.420943] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1818.421128] 
env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1818.421296] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1818.421481] env[61663]: DEBUG nova.virt.hardware [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1818.422344] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4bfbd3-a728-41c8-8d94-4bb4ebc9e0eb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.429911] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a634d9-b6b1-4505-b17f-d5bb4bfe7c90 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.444477] env[61663]: DEBUG nova.policy [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '033e5ebd18fb421b8ad3f4ad5033f1b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7bb1bdc9b1004ff591ab4e001d81b400', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1818.988124] env[61663]: DEBUG nova.network.neutron [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Successfully created port: 0ffe86ee-b1b3-4d1f-a850-c30a019cd60f {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1819.825025] env[61663]: DEBUG nova.compute.manager [req-65d033f8-cf81-4ce1-a711-6088c5280354 req-fa54a698-64e6-488c-b1f7-addccdaebd70 service nova] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Received event network-vif-plugged-0ffe86ee-b1b3-4d1f-a850-c30a019cd60f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1819.825025] env[61663]: DEBUG oslo_concurrency.lockutils [req-65d033f8-cf81-4ce1-a711-6088c5280354 req-fa54a698-64e6-488c-b1f7-addccdaebd70 service nova] Acquiring lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.825025] env[61663]: DEBUG oslo_concurrency.lockutils 
[req-65d033f8-cf81-4ce1-a711-6088c5280354 req-fa54a698-64e6-488c-b1f7-addccdaebd70 service nova] Lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.825025] env[61663]: DEBUG oslo_concurrency.lockutils [req-65d033f8-cf81-4ce1-a711-6088c5280354 req-fa54a698-64e6-488c-b1f7-addccdaebd70 service nova] Lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.825195] env[61663]: DEBUG nova.compute.manager [req-65d033f8-cf81-4ce1-a711-6088c5280354 req-fa54a698-64e6-488c-b1f7-addccdaebd70 service nova] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] No waiting events found dispatching network-vif-plugged-0ffe86ee-b1b3-4d1f-a850-c30a019cd60f {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1819.825195] env[61663]: WARNING nova.compute.manager [req-65d033f8-cf81-4ce1-a711-6088c5280354 req-fa54a698-64e6-488c-b1f7-addccdaebd70 service nova] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Received unexpected event network-vif-plugged-0ffe86ee-b1b3-4d1f-a850-c30a019cd60f for instance with vm_state building and task_state spawning. [ 1819.920564] env[61663]: DEBUG nova.network.neutron [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Successfully updated port: 0ffe86ee-b1b3-4d1f-a850-c30a019cd60f {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1819.933327] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "refresh_cache-46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.933528] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "refresh_cache-46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.933945] env[61663]: DEBUG nova.network.neutron [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1819.973288] env[61663]: DEBUG nova.network.neutron [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1820.151929] env[61663]: DEBUG nova.network.neutron [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Updating instance_info_cache with network_info: [{"id": "0ffe86ee-b1b3-4d1f-a850-c30a019cd60f", "address": "fa:16:3e:b2:6c:8d", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ffe86ee-b1", "ovs_interfaceid": "0ffe86ee-b1b3-4d1f-a850-c30a019cd60f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.163679] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "refresh_cache-46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.163979] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Instance network_info: |[{"id": "0ffe86ee-b1b3-4d1f-a850-c30a019cd60f", "address": "fa:16:3e:b2:6c:8d", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ffe86ee-b1", "ovs_interfaceid": "0ffe86ee-b1b3-4d1f-a850-c30a019cd60f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1820.164554] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:6c:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ffe86ee-b1b3-4d1f-a850-c30a019cd60f', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1820.171980] env[61663]: DEBUG oslo.service.loopingcall [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1820.172432] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1820.172663] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3be7d662-9c9b-450a-9cb5-337b348f3c8c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.192670] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1820.192670] env[61663]: value = "task-1690777" [ 1820.192670] env[61663]: _type = "Task" [ 1820.192670] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.204490] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690777, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.703851] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690777, 'name': CreateVM_Task, 'duration_secs': 0.311765} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.704216] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1820.704834] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1820.705045] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.705544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1820.705818] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-115cc763-79a3-4633-9299-f4e299512700 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.712145] env[61663]: DEBUG oslo_vmware.api [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 1820.712145] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5293389e-9cd1-4b1b-c66c-05d6a7440d66" [ 1820.712145] env[61663]: _type = "Task" [ 1820.712145] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.718980] env[61663]: DEBUG oslo_vmware.api [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5293389e-9cd1-4b1b-c66c-05d6a7440d66, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.221967] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.222343] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1821.222451] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.867667] env[61663]: DEBUG nova.compute.manager [req-c553bf81-4083-492e-8002-7f527465c1e5 req-747f1320-8300-40cc-81cf-65065321b294 service nova] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Received event network-changed-0ffe86ee-b1b3-4d1f-a850-c30a019cd60f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1821.867884] env[61663]: DEBUG nova.compute.manager [req-c553bf81-4083-492e-8002-7f527465c1e5 req-747f1320-8300-40cc-81cf-65065321b294 service nova] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Refreshing instance network info cache due to event network-changed-0ffe86ee-b1b3-4d1f-a850-c30a019cd60f. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1821.868115] env[61663]: DEBUG oslo_concurrency.lockutils [req-c553bf81-4083-492e-8002-7f527465c1e5 req-747f1320-8300-40cc-81cf-65065321b294 service nova] Acquiring lock "refresh_cache-46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.868290] env[61663]: DEBUG oslo_concurrency.lockutils [req-c553bf81-4083-492e-8002-7f527465c1e5 req-747f1320-8300-40cc-81cf-65065321b294 service nova] Acquired lock "refresh_cache-46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.868466] env[61663]: DEBUG nova.network.neutron [req-c553bf81-4083-492e-8002-7f527465c1e5 req-747f1320-8300-40cc-81cf-65065321b294 service nova] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Refreshing network info cache for port 0ffe86ee-b1b3-4d1f-a850-c30a019cd60f {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1822.245048] env[61663]: DEBUG nova.network.neutron [req-c553bf81-4083-492e-8002-7f527465c1e5 req-747f1320-8300-40cc-81cf-65065321b294 service nova] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Updated VIF entry in instance network info cache for port 0ffe86ee-b1b3-4d1f-a850-c30a019cd60f. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1822.245612] env[61663]: DEBUG nova.network.neutron [req-c553bf81-4083-492e-8002-7f527465c1e5 req-747f1320-8300-40cc-81cf-65065321b294 service nova] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Updating instance_info_cache with network_info: [{"id": "0ffe86ee-b1b3-4d1f-a850-c30a019cd60f", "address": "fa:16:3e:b2:6c:8d", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ffe86ee-b1", "ovs_interfaceid": "0ffe86ee-b1b3-4d1f-a850-c30a019cd60f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.255340] env[61663]: DEBUG oslo_concurrency.lockutils [req-c553bf81-4083-492e-8002-7f527465c1e5 req-747f1320-8300-40cc-81cf-65065321b294 service nova] Releasing lock "refresh_cache-46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.735450] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.691995] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1833.692205] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1833.692331] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1833.714864] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.715016] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 04488672-86c4-415b-961e-94641d570112] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.715151] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.715279] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.715401] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.715534] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.715648] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.715776] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.715885] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.716000] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1833.716142] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1834.691582] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.688091] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.691773] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.691980] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.703803] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.704038] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.704216] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.704380] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1836.705519] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19be4e3a-07f0-427b-9811-d07b951e1ab0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.714247] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129947ac-22c3-408e-a245-96b147d09f18 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.728330] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf1ef48-38b6-4101-8881-7da50230dc60 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.734619] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4323622-059a-427a-b096-1eccd9bddf5e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.764773] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181269MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1836.764921] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.765136] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.843358] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1836.843524] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1836.843654] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1836.843779] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1836.843902] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1836.844031] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1836.844157] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1836.844272] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1836.844385] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1836.844496] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1836.872669] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 76a61ec9-99cb-4371-9e7e-dc206c0a9d3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.884624] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance a63f3aaf-9a32-4782-94db-bfbbbd094530 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.894890] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 4cc0a356-4bc7-4713-87af-5c5c7cc792d4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.906208] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5ee3346f-50bf-464e-a4dd-afd1edd0052a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.915689] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b583b039-84c7-4168-91a1-82821c0001a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.926980] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.938259] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance a0399e6e-6b1a-4702-870d-d9644c3d6545 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.947224] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 6d0f9509-1e63-4da8-a92b-9393a7cb4dff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.962604] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b1eece3b-003c-46ea-944d-ccac01ca4ba9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.971765] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 56dc70b4-ebff-42c5-bbdc-bf7ca4a7c73d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.982793] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 440175fc-da0c-4ea3-9a74-46e97e32658b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1836.991521] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1837.001348] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance d62bc172-a64b-481e-a2fa-55ad4ccf73f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1837.011235] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 3444cec9-7da9-47d9-b669-cd1b4261e9d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}.
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1837.011467] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1837.011648] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1837.266986] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5178415f-b03e-4625-b012-e386f67c7455 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1837.274369] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ad85d2-de00-49df-93bc-7bb428a2efde {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1837.304895] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ca6925-a96b-420b-b74d-c04a2c03649f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1837.311938] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4713d110-e322-44ca-b95d-6fbc43318635 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1837.324545] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1837.332761] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1837.349307] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1837.349491] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.584s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1841.350919] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1841.693039] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1841.693039] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1841.693039] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 1847.863421] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1864.810621] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquiring lock "ae347f45-f39e-47eb-9e37-80ddfc502c27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1864.810621] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1864.810621] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0be60762-7a40-4c6c-b95c-8e521424ddd8 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] Acquiring lock "59d4580f-5897-42d6-82cb-0aead4d2658c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1864.811065] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0be60762-7a40-4c6c-b95c-8e521424ddd8 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] Lock "59d4580f-5897-42d6-82cb-0aead4d2658c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1865.666656] env[61663]: WARNING oslo_vmware.rw_handles [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397
tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1865.666656] env[61663]: ERROR oslo_vmware.rw_handles [ 1865.667196] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/85b5a88c-82a1-4ea8-87df-95fb7e4d9afa/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1865.669184] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1865.669456] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Copying Virtual Disk [datastore1] vmware_temp/85b5a88c-82a1-4ea8-87df-95fb7e4d9afa/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/85b5a88c-82a1-4ea8-87df-95fb7e4d9afa/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1865.669790] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a50c234-744b-4358-88a4-de61b2925108 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.679171] env[61663]: DEBUG oslo_vmware.api [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Waiting for the task: (returnval){ [ 1865.679171] env[61663]: value = "task-1690778" [ 1865.679171] env[61663]: _type = "Task" [ 1865.679171] 
env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.687250] env[61663]: DEBUG oslo_vmware.api [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Task: {'id': task-1690778, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.190835] env[61663]: DEBUG oslo_vmware.exceptions [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1866.191185] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.191740] env[61663]: ERROR nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1866.191740] env[61663]: Faults: ['InvalidArgument'] [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Traceback (most recent call last): [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] yield resources [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] self.driver.spawn(context, instance, image_meta, [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] self._fetch_image_if_missing(context, vi) [ 1866.191740] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] image_cache(vi, tmp_image_ds_loc) [ 1866.192126] 
env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] vm_util.copy_virtual_disk( [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] session._wait_for_task(vmdk_copy_task) [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] return self.wait_for_task(task_ref) [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] return evt.wait() [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] result = hub.switch() [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1866.192126] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] return self.greenlet.switch() [ 1866.192570] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1866.192570] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] self.f(*self.args, **self.kw) [ 1866.192570] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1866.192570] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] raise exceptions.translate_fault(task_info.error) [ 1866.192570] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1866.192570] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Faults: ['InvalidArgument'] [ 1866.192570] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] [ 1866.192570] env[61663]: INFO nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Terminating instance [ 1866.193677] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 
tempest-ImagesOneServerTestJSON-670042136-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.193886] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1866.194434] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f7b4b8c-3bc4-49b7-b900-6c740237b189 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.196991] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1866.197202] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1866.198204] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c47ead-c7cd-4d25-86d7-1fad4f647631 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.207848] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1866.208908] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b35b3d92-55fb-4f9a-a270-a5a99755811b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.210334] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1866.210509] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1866.211208] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbc8518c-a3b0-4acd-9eb3-6bc7f88e38f3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.216310] env[61663]: DEBUG oslo_vmware.api [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Waiting for the task: (returnval){ [ 1866.216310] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52710be1-39e5-1e9c-38cf-fc1437881ff7" [ 1866.216310] env[61663]: _type = "Task" [ 1866.216310] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.223440] env[61663]: DEBUG oslo_vmware.api [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52710be1-39e5-1e9c-38cf-fc1437881ff7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.288018] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1866.288258] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1866.288447] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Deleting the datastore file [datastore1] ee0e3e54-c135-489f-87ca-f441efebcbd5 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1866.288745] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ae4ffc7-c877-47f6-9ad2-d9dd5dca2585 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.294870] env[61663]: DEBUG oslo_vmware.api [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Waiting for the task: (returnval){ [ 1866.294870] env[61663]: value = "task-1690780" [ 1866.294870] env[61663]: _type = "Task" [ 1866.294870] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.302490] env[61663]: DEBUG oslo_vmware.api [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Task: {'id': task-1690780, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.726225] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1866.726513] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Creating directory with path [datastore1] vmware_temp/41880d64-d760-492c-8658-66df57a9bc43/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1866.726758] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2d95c62-f3b1-4c37-ba38-12125a60cf42 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.739752] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Created directory with path [datastore1] vmware_temp/41880d64-d760-492c-8658-66df57a9bc43/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1866.739953] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Fetch image to [datastore1] vmware_temp/41880d64-d760-492c-8658-66df57a9bc43/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1866.740146] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/41880d64-d760-492c-8658-66df57a9bc43/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1866.740915] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec73565-6f41-416a-a5ee-5cd271ce0465 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.747464] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879c5f18-a9ad-42bd-9f26-e7962507c78f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.758026] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601477f9-99cc-4d48-be03-f31f014c6bd5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.788158] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c8c0b8dd-f34f-4e55-b6b1-499d58298938 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.793846] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0a538316-6bf5-42cd-899d-822e46ce2287 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.803409] env[61663]: DEBUG oslo_vmware.api [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Task: {'id': task-1690780, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085641} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.803611] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1866.803800] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1866.804478] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1866.804478] env[61663]: INFO nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Took 0.61 seconds to destroy the instance on the hypervisor. 
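[editor's note] The DeleteDatastoreFile_Task records above follow oslo.vmware's invoke-then-poll pattern: the client fires a server-side vSphere task, then wait_for_task() polls it (the _poll_task progress lines) until it completes or faults. A minimal sketch of that pattern, assuming the oslo.vmware library; the host, credentials, path, and dc_ref below are placeholders, not values from this log:

    from oslo_vmware import api

    # Session setup mirrors the VMwareAPISession created at service start.
    session = api.VMwareAPISession(
        host='vc.example.test',           # hypothetical vCenter endpoint
        server_username='user',           # hypothetical credentials
        server_password='secret',
        api_retry_count=10,
        task_poll_interval=0.5)

    # Fire a task and block until it finishes. wait_for_task() polls the
    # task object and raises a translated exception (such as the
    # VimFaultException seen earlier) if the task ends in error.
    file_manager = session.vim.service_content.fileManager
    dc_ref = None  # stand-in; real code passes a Datacenter moref
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] ee0e3e54-c135-489f-87ca-f441efebcbd5',
        datacenter=dc_ref)
    session.wait_for_task(task)

The same wait_for_task() call is what surfaced the earlier CopyVirtualDisk_Task failure as oslo_vmware.exceptions.VimFaultException.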
[ 1866.806259] env[61663]: DEBUG nova.compute.claims [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1866.806434] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.806741] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.820695] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1867.062230] env[61663]: DEBUG oslo_vmware.rw_handles [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/41880d64-d760-492c-8658-66df57a9bc43/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1867.125643] env[61663]: DEBUG oslo_vmware.rw_handles [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1867.125849] env[61663]: DEBUG oslo_vmware.rw_handles [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/41880d64-d760-492c-8658-66df57a9bc43/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1867.239199] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25f7a3b-f56a-4c7a-acac-a6e9df726c30 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.249083] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea5470a-246a-4ea5-a365-abf405190b23 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.279807] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984866b7-6eb2-4e64-8e0a-bb14c4de04ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.287668] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbbd017-5517-4b8e-8ebf-6a5828cc0c7b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.302319] env[61663]: DEBUG nova.compute.provider_tree [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1867.312403] env[61663]: DEBUG nova.scheduler.client.report [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1867.328977] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.521s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.328977] env[61663]: ERROR nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1867.328977] env[61663]: Faults: ['InvalidArgument'] [ 1867.328977] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Traceback (most recent call last): [ 1867.328977] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1867.328977] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] self.driver.spawn(context, instance, image_meta, [ 1867.328977] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1867.328977] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1867.328977] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1867.328977] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] self._fetch_image_if_missing(context, vi) [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] image_cache(vi, tmp_image_ds_loc) [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] vm_util.copy_virtual_disk( [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] session._wait_for_task(vmdk_copy_task) [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] return self.wait_for_task(task_ref) [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] return evt.wait() [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] result = hub.switch() [ 1867.329365] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1867.329698] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] return self.greenlet.switch() [ 1867.329698] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1867.329698] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] self.f(*self.args, **self.kw) [ 1867.329698] env[61663]: ERROR nova.compute.manager [instance: 
ee0e3e54-c135-489f-87ca-f441efebcbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1867.329698] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] raise exceptions.translate_fault(task_info.error) [ 1867.329698] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1867.329698] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Faults: ['InvalidArgument'] [ 1867.329698] env[61663]: ERROR nova.compute.manager [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] [ 1867.329698] env[61663]: DEBUG nova.compute.utils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1867.331784] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Build of instance ee0e3e54-c135-489f-87ca-f441efebcbd5 was re-scheduled: A specified parameter was not correct: fileType [ 1867.331784] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1867.332899] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1867.332899] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1867.332899] env[61663]: DEBUG nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1867.332899] env[61663]: DEBUG nova.network.neutron [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1867.787144] env[61663]: DEBUG nova.network.neutron [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.806254] env[61663]: INFO nova.compute.manager [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Took 0.47 seconds to deallocate network for instance. [ 1867.970653] env[61663]: INFO nova.scheduler.client.report [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Deleted allocations for instance ee0e3e54-c135-489f-87ca-f441efebcbd5 [ 1868.002585] env[61663]: DEBUG oslo_concurrency.lockutils [None req-cc5092e9-f283-45c5-ae4a-ff98c5637397 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock "ee0e3e54-c135-489f-87ca-f441efebcbd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 609.235s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.002585] env[61663]: DEBUG oslo_concurrency.lockutils [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock "ee0e3e54-c135-489f-87ca-f441efebcbd5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 411.637s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.002585] env[61663]: DEBUG oslo_concurrency.lockutils [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Acquiring lock "ee0e3e54-c135-489f-87ca-f441efebcbd5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.002769] env[61663]: DEBUG oslo_concurrency.lockutils [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock 
"ee0e3e54-c135-489f-87ca-f441efebcbd5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.002769] env[61663]: DEBUG oslo_concurrency.lockutils [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock "ee0e3e54-c135-489f-87ca-f441efebcbd5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.007304] env[61663]: INFO nova.compute.manager [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Terminating instance [ 1868.010695] env[61663]: DEBUG nova.compute.manager [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1868.011254] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1868.011377] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e72f8c9-eff1-46a7-a7a2-9026d2bc3975 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.016993] env[61663]: DEBUG nova.compute.manager [None req-bd6de649-c90a-41f0-897f-c87d7fe10e2e tempest-ServerAddressesNegativeTestJSON-2016992940 tempest-ServerAddressesNegativeTestJSON-2016992940-project-member] [instance: 76a61ec9-99cb-4371-9e7e-dc206c0a9d3d] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1868.027509] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-075cd99f-b0c6-4955-a046-8dcaf45da09d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.065404] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ee0e3e54-c135-489f-87ca-f441efebcbd5 could not be found. 
[ 1868.065618] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1868.065811] env[61663]: INFO nova.compute.manager [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1868.066087] env[61663]: DEBUG oslo.service.loopingcall [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.066654] env[61663]: DEBUG nova.compute.manager [None req-bd6de649-c90a-41f0-897f-c87d7fe10e2e tempest-ServerAddressesNegativeTestJSON-2016992940 tempest-ServerAddressesNegativeTestJSON-2016992940-project-member] [instance: 76a61ec9-99cb-4371-9e7e-dc206c0a9d3d] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1868.067880] env[61663]: DEBUG nova.compute.manager [-] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1868.067981] env[61663]: DEBUG nova.network.neutron [-] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1868.097740] env[61663]: DEBUG oslo_concurrency.lockutils [None req-bd6de649-c90a-41f0-897f-c87d7fe10e2e tempest-ServerAddressesNegativeTestJSON-2016992940 tempest-ServerAddressesNegativeTestJSON-2016992940-project-member] Lock "76a61ec9-99cb-4371-9e7e-dc206c0a9d3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.166s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.110334] env[61663]: DEBUG nova.compute.manager [None req-38d90fad-8aec-4730-a31a-c9562e473210 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087-project-member] [instance: a63f3aaf-9a32-4782-94db-bfbbbd094530] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1868.113948] env[61663]: DEBUG nova.network.neutron [-] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.125995] env[61663]: INFO nova.compute.manager [-] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] Took 0.06 seconds to deallocate network for instance. 
[ 1868.150259] env[61663]: DEBUG nova.compute.manager [None req-38d90fad-8aec-4730-a31a-c9562e473210 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087-project-member] [instance: a63f3aaf-9a32-4782-94db-bfbbbd094530] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1868.184934] env[61663]: DEBUG oslo_concurrency.lockutils [None req-38d90fad-8aec-4730-a31a-c9562e473210 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087 tempest-FloatingIPsAssociationNegativeTestJSON-2134864087-project-member] Lock "a63f3aaf-9a32-4782-94db-bfbbbd094530" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.494s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.195703] env[61663]: DEBUG nova.compute.manager [None req-760acb0b-205a-4de7-a63f-ca1dc75b6d7a tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: 4cc0a356-4bc7-4713-87af-5c5c7cc792d4] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1868.231064] env[61663]: DEBUG nova.compute.manager [None req-760acb0b-205a-4de7-a63f-ca1dc75b6d7a tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: 4cc0a356-4bc7-4713-87af-5c5c7cc792d4] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1868.249258] env[61663]: DEBUG oslo_concurrency.lockutils [None req-50e5b227-1637-4ace-86a7-78dfd9c9ca32 tempest-VolumesAssistedSnapshotsTest-1843675215 tempest-VolumesAssistedSnapshotsTest-1843675215-project-member] Lock "ee0e3e54-c135-489f-87ca-f441efebcbd5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.248s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.250784] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "ee0e3e54-c135-489f-87ca-f441efebcbd5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 79.165s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.250981] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ee0e3e54-c135-489f-87ca-f441efebcbd5] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1868.251176] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "ee0e3e54-c135-489f-87ca-f441efebcbd5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.254415] env[61663]: DEBUG oslo_concurrency.lockutils [None req-760acb0b-205a-4de7-a63f-ca1dc75b6d7a tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "4cc0a356-4bc7-4713-87af-5c5c7cc792d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.140s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.262860] env[61663]: DEBUG nova.compute.manager [None req-e8d01236-f3c0-4a6c-9f03-3fa99139f420 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] [instance: 5ee3346f-50bf-464e-a4dd-afd1edd0052a] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1868.290080] env[61663]: DEBUG nova.compute.manager [None req-e8d01236-f3c0-4a6c-9f03-3fa99139f420 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] [instance: 5ee3346f-50bf-464e-a4dd-afd1edd0052a] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1868.320984] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e8d01236-f3c0-4a6c-9f03-3fa99139f420 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] Lock "5ee3346f-50bf-464e-a4dd-afd1edd0052a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.989s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.330936] env[61663]: DEBUG nova.compute.manager [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1868.400524] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.400792] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.406136] env[61663]: INFO nova.compute.claims [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1868.751372] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da589e88-e32e-4c67-93e9-925a1bf3c12b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.758966] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dab34d7-d0c0-47f7-85ca-52f6cfaf5b56 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.793447] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf48643-b79a-4b4f-98ed-d3f42dd1c19b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.801078] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eccf3059-b8ca-4e89-a2b6-f9ed87ad1d5a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.814523] env[61663]: DEBUG nova.compute.provider_tree [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.823225] env[61663]: DEBUG nova.scheduler.client.report [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1868.840785] env[61663]: DEBUG 
oslo_concurrency.lockutils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.440s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.841207] env[61663]: DEBUG nova.compute.manager [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1868.886684] env[61663]: DEBUG nova.compute.utils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1868.888484] env[61663]: DEBUG nova.compute.manager [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1868.888711] env[61663]: DEBUG nova.network.neutron [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1868.900724] env[61663]: DEBUG nova.compute.manager [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Start building block device mappings for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1868.935019] env[61663]: INFO nova.virt.block_device [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Booting with volume 92db3222-ff59-45f0-9961-25b5e6d6e1ca at /dev/sda [ 1868.962980] env[61663]: DEBUG nova.policy [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b91e969e54e04c45a1e792bc8e807217', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9c969a8a5e264a6fa1dc2539fde01b32', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1868.995282] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-398cd144-bc12-4f57-8e01-37dfe60128d2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.004638] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-912fe176-f723-43a3-a1ba-03af21db5477 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.045200] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86e34bee-7df5-47ee-8af9-5173eeddf7d0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.053474] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a531d410-46f8-44d1-a2dc-3680510ef9f2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.084627] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cc9ea7-9914-4005-a0f7-8071a1134bff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.092069] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3afa0f-dac0-4e63-8ce3-2425fb2ca915 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.107066] env[61663]: DEBUG nova.virt.block_device [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updating existing volume attachment record: a501b42f-0ea9-496b-a4d9-435df88c4aac {{(pid=61663) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 1869.375541] env[61663]: DEBUG nova.compute.manager [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1869.376085] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1869.376428] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1869.376525] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1869.376636] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1869.376784] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1869.377193] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1869.377559] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1869.378042] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1869.378148] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Got 1 
possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1869.378411] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1869.378705] env[61663]: DEBUG nova.virt.hardware [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1869.379972] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad03894c-7332-4b24-aa6d-b874e90a6d7c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.388446] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8cc768-f7a2-4f22-a0d2-116fc5d8ba17 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.431582] env[61663]: DEBUG nova.network.neutron [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Successfully created port: 5de89de6-e60d-4821-8c6f-dabdd7e26079 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1870.425160] env[61663]: DEBUG nova.compute.manager [req-048850fa-8576-43e3-9493-8142204059bf req-1fa61117-218f-45c6-b648-8fa0fadbef73 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Received event network-vif-plugged-5de89de6-e60d-4821-8c6f-dabdd7e26079 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1870.425160] env[61663]: DEBUG oslo_concurrency.lockutils [req-048850fa-8576-43e3-9493-8142204059bf req-1fa61117-218f-45c6-b648-8fa0fadbef73 service nova] Acquiring lock "b583b039-84c7-4168-91a1-82821c0001a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.425160] env[61663]: DEBUG oslo_concurrency.lockutils [req-048850fa-8576-43e3-9493-8142204059bf req-1fa61117-218f-45c6-b648-8fa0fadbef73 service nova] Lock "b583b039-84c7-4168-91a1-82821c0001a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.425160] env[61663]: DEBUG oslo_concurrency.lockutils [req-048850fa-8576-43e3-9493-8142204059bf req-1fa61117-218f-45c6-b648-8fa0fadbef73 service nova] Lock "b583b039-84c7-4168-91a1-82821c0001a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.425506] env[61663]: DEBUG nova.compute.manager [req-048850fa-8576-43e3-9493-8142204059bf req-1fa61117-218f-45c6-b648-8fa0fadbef73 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] No 
waiting events found dispatching network-vif-plugged-5de89de6-e60d-4821-8c6f-dabdd7e26079 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1870.425584] env[61663]: WARNING nova.compute.manager [req-048850fa-8576-43e3-9493-8142204059bf req-1fa61117-218f-45c6-b648-8fa0fadbef73 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Received unexpected event network-vif-plugged-5de89de6-e60d-4821-8c6f-dabdd7e26079 for instance with vm_state building and task_state spawning. [ 1870.652701] env[61663]: DEBUG nova.network.neutron [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Successfully updated port: 5de89de6-e60d-4821-8c6f-dabdd7e26079 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1870.663474] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Acquiring lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.665143] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Acquired lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.665143] env[61663]: DEBUG nova.network.neutron [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1870.729887] env[61663]: DEBUG nova.network.neutron [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1870.974187] env[61663]: DEBUG nova.network.neutron [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updating instance_info_cache with network_info: [{"id": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "address": "fa:16:3e:28:11:24", "network": {"id": "5b5de519-0bdd-4d4c-abc1-d3d48529f56c", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-575763494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c969a8a5e264a6fa1dc2539fde01b32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5de89de6-e6", "ovs_interfaceid": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.989973] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Releasing lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.990300] env[61663]: DEBUG nova.compute.manager [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Instance network_info: |[{"id": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "address": "fa:16:3e:28:11:24", "network": {"id": "5b5de519-0bdd-4d4c-abc1-d3d48529f56c", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-575763494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c969a8a5e264a6fa1dc2539fde01b32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5de89de6-e6", "ovs_interfaceid": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1870.990732] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:11:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93c5b7ce-4c84-40bc-884c-b2453e0eee69', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5de89de6-e60d-4821-8c6f-dabdd7e26079', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1870.998504] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Creating folder: Project (9c969a8a5e264a6fa1dc2539fde01b32). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1870.999103] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad1ac337-18c8-4059-a6d9-f1e680df40c8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.013508] env[61663]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1871.013508] env[61663]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=61663) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1871.013632] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Folder already exists: Project (9c969a8a5e264a6fa1dc2539fde01b32). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1871.013766] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Creating folder: Instances. Parent ref: group-v352632. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1871.014022] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f6251c0-52dd-4938-8159-1a529166bad1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.023759] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Created folder: Instances in parent group-v352632. [ 1871.023887] env[61663]: DEBUG oslo.service.loopingcall [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1871.024074] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1871.024289] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5cb4398-279e-4280-a382-819ff0aa2096 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.045390] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1871.045390] env[61663]: value = "task-1690783" [ 1871.045390] env[61663]: _type = "Task" [ 1871.045390] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.053626] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690783, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.555383] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690783, 'name': CreateVM_Task, 'duration_secs': 0.309199} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.555771] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1871.562877] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'attachment_id': 'a501b42f-0ea9-496b-a4d9-435df88c4aac', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352638', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'name': 'volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b583b039-84c7-4168-91a1-82821c0001a3', 'attached_at': '', 'detached_at': '', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'serial': '92db3222-ff59-45f0-9961-25b5e6d6e1ca'}, 'delete_on_termination': True, 'device_type': None, 'boot_index': 0, 'disk_bus': None, 'mount_device': '/dev/sda', 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=61663) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1871.563130] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Root volume attach. 
Driver type: vmdk {{(pid=61663) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1871.563945] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24021236-2f05-47fe-84c2-1f85b6dd3753 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.573172] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52d9284-3a70-49ca-a064-ed9ce97accfa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.579926] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df231abd-ffda-4965-bfaf-1e6811aa9c0f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.585413] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4e6deb67-04fe-4df1-a629-3042164cebd3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.593290] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1871.593290] env[61663]: value = "task-1690784" [ 1871.593290] env[61663]: _type = "Task" [ 1871.593290] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.601931] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690784, 'name': RelocateVM_Task} progress is 5%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.104429] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690784, 'name': RelocateVM_Task, 'duration_secs': 0.348321} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.104718] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Volume attach. 
Driver type: vmdk {{(pid=61663) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1872.104916] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352638', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'name': 'volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b583b039-84c7-4168-91a1-82821c0001a3', 'attached_at': '', 'detached_at': '', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'serial': '92db3222-ff59-45f0-9961-25b5e6d6e1ca'} {{(pid=61663) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1872.105778] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2347b853-d437-4689-ae50-444f6f57da49 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.123374] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bbff74-f8f3-42fd-b46a-1c32050dc85d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.150083] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca/volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca.vmdk or device None with type thin {{(pid=61663) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1872.150395] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13155510-20e5-4086-bc3f-6b0c8fe1daea {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.171627] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1872.171627] env[61663]: value = "task-1690785" [ 1872.171627] env[61663]: _type = "Task" [ 1872.171627] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.181441] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690785, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.561172] env[61663]: DEBUG nova.compute.manager [req-434222bf-5527-42e5-a25d-1d3d77ae39a3 req-58bcf830-f5c5-4ff4-9a96-c38802e89d76 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Received event network-changed-5de89de6-e60d-4821-8c6f-dabdd7e26079 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1872.561471] env[61663]: DEBUG nova.compute.manager [req-434222bf-5527-42e5-a25d-1d3d77ae39a3 req-58bcf830-f5c5-4ff4-9a96-c38802e89d76 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Refreshing instance network info cache due to event network-changed-5de89de6-e60d-4821-8c6f-dabdd7e26079. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1872.561589] env[61663]: DEBUG oslo_concurrency.lockutils [req-434222bf-5527-42e5-a25d-1d3d77ae39a3 req-58bcf830-f5c5-4ff4-9a96-c38802e89d76 service nova] Acquiring lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.561732] env[61663]: DEBUG oslo_concurrency.lockutils [req-434222bf-5527-42e5-a25d-1d3d77ae39a3 req-58bcf830-f5c5-4ff4-9a96-c38802e89d76 service nova] Acquired lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.561894] env[61663]: DEBUG nova.network.neutron [req-434222bf-5527-42e5-a25d-1d3d77ae39a3 req-58bcf830-f5c5-4ff4-9a96-c38802e89d76 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Refreshing network info cache for port 5de89de6-e60d-4821-8c6f-dabdd7e26079 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1872.683954] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690785, 'name': ReconfigVM_Task, 'duration_secs': 0.262638} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.686078] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Reconfigured VM instance instance-0000003a to attach disk [datastore1] volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca/volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca.vmdk or device None with type thin {{(pid=61663) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1872.692639] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cef47c1-c669-4d71-944d-2cdcdd58f036 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.710502] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1872.710502] env[61663]: value = "task-1690786" [ 1872.710502] env[61663]: _type = "Task" [ 1872.710502] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.719740] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690786, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.220836] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690786, 'name': ReconfigVM_Task, 'duration_secs': 0.112675} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.221159] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352638', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'name': 'volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b583b039-84c7-4168-91a1-82821c0001a3', 'attached_at': '', 'detached_at': '', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'serial': '92db3222-ff59-45f0-9961-25b5e6d6e1ca'} {{(pid=61663) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1873.221761] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c79730b0-ec31-46ed-842d-407e22e23ea7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.227934] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1873.227934] env[61663]: value = "task-1690787" [ 1873.227934] env[61663]: _type = "Task" [ 1873.227934] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.235588] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690787, 'name': Rename_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.404843] env[61663]: DEBUG nova.network.neutron [req-434222bf-5527-42e5-a25d-1d3d77ae39a3 req-58bcf830-f5c5-4ff4-9a96-c38802e89d76 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updated VIF entry in instance network info cache for port 5de89de6-e60d-4821-8c6f-dabdd7e26079. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1873.405225] env[61663]: DEBUG nova.network.neutron [req-434222bf-5527-42e5-a25d-1d3d77ae39a3 req-58bcf830-f5c5-4ff4-9a96-c38802e89d76 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updating instance_info_cache with network_info: [{"id": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "address": "fa:16:3e:28:11:24", "network": {"id": "5b5de519-0bdd-4d4c-abc1-d3d48529f56c", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-575763494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c969a8a5e264a6fa1dc2539fde01b32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5de89de6-e6", "ovs_interfaceid": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.425574] env[61663]: DEBUG oslo_concurrency.lockutils [req-434222bf-5527-42e5-a25d-1d3d77ae39a3 req-58bcf830-f5c5-4ff4-9a96-c38802e89d76 service nova] Releasing lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1873.739054] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690787, 'name': Rename_Task, 'duration_secs': 0.123263} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.739436] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Powering on the VM {{(pid=61663) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1873.739722] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7bc64cb-3c02-46a9-aaab-984f9e9a66d3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.746482] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1873.746482] env[61663]: value = "task-1690788" [ 1873.746482] env[61663]: _type = "Task" [ 1873.746482] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.757334] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690788, 'name': PowerOnVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.256691] env[61663]: DEBUG oslo_vmware.api [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690788, 'name': PowerOnVM_Task, 'duration_secs': 0.471143} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.256979] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Powered on the VM {{(pid=61663) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1874.257220] env[61663]: INFO nova.compute.manager [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Took 4.88 seconds to spawn the instance on the hypervisor. [ 1874.257481] env[61663]: DEBUG nova.compute.manager [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Checking state {{(pid=61663) _get_power_state /opt/stack/nova/nova/compute/manager.py:1782}} [ 1874.258253] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb12e13-cb8f-495d-8ce0-baf0532f52ee {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.316654] env[61663]: INFO nova.compute.manager [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Took 5.94 seconds to build instance. [ 1874.334447] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e7ff8ad-13a6-44f3-ad76-18e40c996296 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "b583b039-84c7-4168-91a1-82821c0001a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 189.378s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.348822] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1874.409767] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.410059] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.411602] env[61663]: INFO nova.compute.claims [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1874.771393] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b5483b-649c-493a-b63c-10d0633ab3d4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.784678] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3528c37b-bfa2-41a1-aefd-069cc967c864 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.828433] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4966808e-4d51-4f62-a0fb-9bd48da679a0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.835212] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa802486-aa25-40f6-9180-fa354d771418 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.852843] env[61663]: DEBUG nova.compute.provider_tree [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1874.866424] env[61663]: DEBUG nova.scheduler.client.report [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1874.885019] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.473s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.885019] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1874.923020] env[61663]: DEBUG nova.compute.utils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1874.923683] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1874.923993] env[61663]: DEBUG nova.network.neutron [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1874.932867] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1875.003023] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1875.037054] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1875.037627] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1875.037957] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1875.038337] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1875.038680] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1875.038988] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1875.039367] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1875.039691] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1875.040035] env[61663]: DEBUG 
nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1875.040372] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1875.040709] env[61663]: DEBUG nova.virt.hardware [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1875.042113] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8338d5a5-fd56-4a69-94b8-ecdd83be6c1e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.054023] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7d0416-c2f9-4140-9051-5194d1d7f1b6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.301134] env[61663]: DEBUG nova.policy [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '870d631c1dcb4ac49b1ac723125b5a3b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac40d7a432c24211ab3325679ab4dec9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1876.107264] env[61663]: DEBUG nova.network.neutron [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Successfully created port: 491e4db8-e68a-4d96-ab44-868fdcef9229 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1877.476019] env[61663]: DEBUG nova.compute.manager [req-d3b7fc95-4f21-425d-922c-93c5223898d0 req-31b6f285-a89f-4f04-98a8-8521cf4639a7 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Received event network-changed-5de89de6-e60d-4821-8c6f-dabdd7e26079 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1877.476019] env[61663]: DEBUG nova.compute.manager [req-d3b7fc95-4f21-425d-922c-93c5223898d0 req-31b6f285-a89f-4f04-98a8-8521cf4639a7 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Refreshing instance network info cache due to event network-changed-5de89de6-e60d-4821-8c6f-dabdd7e26079. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1877.476019] env[61663]: DEBUG oslo_concurrency.lockutils [req-d3b7fc95-4f21-425d-922c-93c5223898d0 req-31b6f285-a89f-4f04-98a8-8521cf4639a7 service nova] Acquiring lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.476019] env[61663]: DEBUG oslo_concurrency.lockutils [req-d3b7fc95-4f21-425d-922c-93c5223898d0 req-31b6f285-a89f-4f04-98a8-8521cf4639a7 service nova] Acquired lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.476019] env[61663]: DEBUG nova.network.neutron [req-d3b7fc95-4f21-425d-922c-93c5223898d0 req-31b6f285-a89f-4f04-98a8-8521cf4639a7 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Refreshing network info cache for port 5de89de6-e60d-4821-8c6f-dabdd7e26079 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1878.231815] env[61663]: DEBUG nova.network.neutron [req-d3b7fc95-4f21-425d-922c-93c5223898d0 req-31b6f285-a89f-4f04-98a8-8521cf4639a7 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updated VIF entry in instance network info cache for port 5de89de6-e60d-4821-8c6f-dabdd7e26079. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1878.232202] env[61663]: DEBUG nova.network.neutron [req-d3b7fc95-4f21-425d-922c-93c5223898d0 req-31b6f285-a89f-4f04-98a8-8521cf4639a7 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updating instance_info_cache with network_info: [{"id": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "address": "fa:16:3e:28:11:24", "network": {"id": "5b5de519-0bdd-4d4c-abc1-d3d48529f56c", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-575763494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c969a8a5e264a6fa1dc2539fde01b32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5de89de6-e6", "ovs_interfaceid": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.246219] env[61663]: DEBUG oslo_concurrency.lockutils [req-d3b7fc95-4f21-425d-922c-93c5223898d0 req-31b6f285-a89f-4f04-98a8-8521cf4639a7 service nova] Releasing lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.396163] env[61663]: DEBUG nova.network.neutron [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 
tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Successfully updated port: 491e4db8-e68a-4d96-ab44-868fdcef9229 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1878.409794] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquiring lock "refresh_cache-b51a331f-2b96-457f-9c9e-99379e8ae7fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.409957] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquired lock "refresh_cache-b51a331f-2b96-457f-9c9e-99379e8ae7fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.410127] env[61663]: DEBUG nova.network.neutron [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1878.488973] env[61663]: DEBUG nova.network.neutron [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1878.723885] env[61663]: DEBUG nova.network.neutron [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Updating instance_info_cache with network_info: [{"id": "491e4db8-e68a-4d96-ab44-868fdcef9229", "address": "fa:16:3e:99:78:ca", "network": {"id": "47419cbb-d608-4147-8059-380d499b9664", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1097694648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac40d7a432c24211ab3325679ab4dec9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90878b7b-ddb7-4f47-892b-d6e06f73475f", "external-id": "nsx-vlan-transportzone-849", "segmentation_id": 849, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap491e4db8-e6", "ovs_interfaceid": "491e4db8-e68a-4d96-ab44-868fdcef9229", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.734945] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 
tempest-ServersNegativeTestJSON-287410550-project-member] Releasing lock "refresh_cache-b51a331f-2b96-457f-9c9e-99379e8ae7fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.736389] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Instance network_info: |[{"id": "491e4db8-e68a-4d96-ab44-868fdcef9229", "address": "fa:16:3e:99:78:ca", "network": {"id": "47419cbb-d608-4147-8059-380d499b9664", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1097694648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac40d7a432c24211ab3325679ab4dec9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90878b7b-ddb7-4f47-892b-d6e06f73475f", "external-id": "nsx-vlan-transportzone-849", "segmentation_id": 849, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap491e4db8-e6", "ovs_interfaceid": "491e4db8-e68a-4d96-ab44-868fdcef9229", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1878.736536] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:78:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90878b7b-ddb7-4f47-892b-d6e06f73475f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '491e4db8-e68a-4d96-ab44-868fdcef9229', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1878.743195] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Creating folder: Project (ac40d7a432c24211ab3325679ab4dec9). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1878.743706] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5dcd1c36-ac78-43f4-9c32-635a192a1daf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.753354] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Created folder: Project (ac40d7a432c24211ab3325679ab4dec9) in parent group-v352575. 
[ 1878.753535] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Creating folder: Instances. Parent ref: group-v352648. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1878.753744] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69aec98b-c916-4eda-b89a-10770bde7eff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.762357] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Created folder: Instances in parent group-v352648. [ 1878.762573] env[61663]: DEBUG oslo.service.loopingcall [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1878.762745] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1878.762922] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cb82a39b-11e7-43a5-bec2-791b4bc30df5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.780916] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1878.780916] env[61663]: value = "task-1690791" [ 1878.780916] env[61663]: _type = "Task" [ 1878.780916] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.790941] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690791, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.291061] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690791, 'name': CreateVM_Task, 'duration_secs': 0.350123} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.291305] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1879.292144] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.292315] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.292765] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1879.293136] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ecf868e-04fe-4b2e-82f0-9a149008add7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.298847] env[61663]: DEBUG oslo_vmware.api [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Waiting for the task: (returnval){ [ 1879.298847] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528e82a8-fe91-b324-7518-9715eb1d76a3" [ 1879.298847] env[61663]: _type = "Task" [ 1879.298847] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.307422] env[61663]: DEBUG oslo_vmware.api [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528e82a8-fe91-b324-7518-9715eb1d76a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.502852] env[61663]: DEBUG nova.compute.manager [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Received event network-vif-plugged-491e4db8-e68a-4d96-ab44-868fdcef9229 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1879.503134] env[61663]: DEBUG oslo_concurrency.lockutils [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] Acquiring lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.503294] env[61663]: DEBUG oslo_concurrency.lockutils [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] Lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.503462] env[61663]: DEBUG oslo_concurrency.lockutils [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] Lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.503629] env[61663]: DEBUG nova.compute.manager [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] No waiting events found dispatching network-vif-plugged-491e4db8-e68a-4d96-ab44-868fdcef9229 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1879.503795] env[61663]: WARNING nova.compute.manager [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Received unexpected event network-vif-plugged-491e4db8-e68a-4d96-ab44-868fdcef9229 for instance with vm_state building and task_state spawning. [ 1879.503955] env[61663]: DEBUG nova.compute.manager [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Received event network-changed-491e4db8-e68a-4d96-ab44-868fdcef9229 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1879.504131] env[61663]: DEBUG nova.compute.manager [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Refreshing instance network info cache due to event network-changed-491e4db8-e68a-4d96-ab44-868fdcef9229. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1879.504336] env[61663]: DEBUG oslo_concurrency.lockutils [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] Acquiring lock "refresh_cache-b51a331f-2b96-457f-9c9e-99379e8ae7fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.504471] env[61663]: DEBUG oslo_concurrency.lockutils [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] Acquired lock "refresh_cache-b51a331f-2b96-457f-9c9e-99379e8ae7fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.504614] env[61663]: DEBUG nova.network.neutron [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Refreshing network info cache for port 491e4db8-e68a-4d96-ab44-868fdcef9229 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1879.809781] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.810093] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1879.810250] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.923592] env[61663]: DEBUG nova.network.neutron [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Updated VIF entry in instance network info cache for port 491e4db8-e68a-4d96-ab44-868fdcef9229. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1879.923949] env[61663]: DEBUG nova.network.neutron [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Updating instance_info_cache with network_info: [{"id": "491e4db8-e68a-4d96-ab44-868fdcef9229", "address": "fa:16:3e:99:78:ca", "network": {"id": "47419cbb-d608-4147-8059-380d499b9664", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1097694648-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac40d7a432c24211ab3325679ab4dec9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90878b7b-ddb7-4f47-892b-d6e06f73475f", "external-id": "nsx-vlan-transportzone-849", "segmentation_id": 849, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap491e4db8-e6", "ovs_interfaceid": "491e4db8-e68a-4d96-ab44-868fdcef9229", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.933307] env[61663]: DEBUG oslo_concurrency.lockutils [req-35aed8ed-33a0-4873-9a08-7b3a059a7aea req-87b0e844-c2b2-4424-a460-fbb6c98bab45 service nova] Releasing lock "refresh_cache-b51a331f-2b96-457f-9c9e-99379e8ae7fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.913677] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquiring lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.723780] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.724076] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.244767] env[61663]: INFO nova.compute.manager [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: 
b583b039-84c7-4168-91a1-82821c0001a3] Rebuilding instance [ 1893.303055] env[61663]: DEBUG nova.compute.manager [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Checking state {{(pid=61663) _get_power_state /opt/stack/nova/nova/compute/manager.py:1782}} [ 1893.303915] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558f8df4-b791-475f-96e6-e63dde5ae5c9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.350851] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Powering off the VM {{(pid=61663) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1893.351440] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c8501f3-55f8-4d3c-a23a-bc20c080f7d1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.358534] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1893.358534] env[61663]: value = "task-1690792" [ 1893.358534] env[61663]: _type = "Task" [ 1893.358534] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.368467] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690792, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.870482] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690792, 'name': PowerOffVM_Task, 'duration_secs': 0.166706} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.870742] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Powered off the VM {{(pid=61663) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1893.871479] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Powering off the VM {{(pid=61663) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1893.871726] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f052a9d-ff05-471b-9a47-5d603f9c052a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.879049] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1893.879049] env[61663]: value = "task-1690793" [ 1893.879049] env[61663]: _type = "Task" [ 1893.879049] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.886719] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.389650] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] VM already powered off {{(pid=61663) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1509}} [ 1894.389932] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Volume detach. 
Driver type: vmdk {{(pid=61663) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1894.390152] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352638', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'name': 'volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b583b039-84c7-4168-91a1-82821c0001a3', 'attached_at': '', 'detached_at': '', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'serial': '92db3222-ff59-45f0-9961-25b5e6d6e1ca'} {{(pid=61663) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1894.390914] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-527a117e-da1b-4b78-b57b-6ec1e14b4729 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.408528] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2c6015-2cc6-4913-a9b7-99a129d588a0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.414754] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174e31b1-4ead-4143-8f80-03b23355234b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.431328] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f220de1-3718-4b68-8a2c-5fa362e1d973 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.445480] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] The volume has not been displaced from its original location: [datastore1] volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca/volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca.vmdk. No consolidation needed. 
{{(pid=61663) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1894.450678] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Reconfiguring VM instance instance-0000003a to detach disk 2000 {{(pid=61663) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1894.450950] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aacd7504-9c4b-4cd9-a80c-2d1f57611894 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.469043] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1894.469043] env[61663]: value = "task-1690794" [ 1894.469043] env[61663]: _type = "Task" [ 1894.469043] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.477582] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690794, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.692490] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.692716] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.978228] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690794, 'name': ReconfigVM_Task, 'duration_secs': 0.1881} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.978523] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Reconfigured VM instance instance-0000003a to detach disk 2000 {{(pid=61663) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1894.983130] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0468a4a6-931a-4185-80b2-e198942d7992 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.001626] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1895.001626] env[61663]: value = "task-1690795" [ 1895.001626] env[61663]: _type = "Task" [ 1895.001626] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.009864] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690795, 'name': ReconfigVM_Task} progress is 5%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.511833] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690795, 'name': ReconfigVM_Task, 'duration_secs': 0.11582} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.512209] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-352638', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'name': 'volume-92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b583b039-84c7-4168-91a1-82821c0001a3', 'attached_at': '', 'detached_at': '', 'volume_id': '92db3222-ff59-45f0-9961-25b5e6d6e1ca', 'serial': '92db3222-ff59-45f0-9961-25b5e6d6e1ca'} {{(pid=61663) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1895.512499] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1895.513250] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8565df-fb99-43f4-846a-b50fed863e07 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.519521] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1895.519800] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-070e932b-8e59-40bc-8d85-9be7cee0209c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.583363] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1895.583589] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1895.583773] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Deleting the datastore file [datastore1] b583b039-84c7-4168-91a1-82821c0001a3 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1895.584193] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4ced61d-14ab-425c-9bf5-4c4bcdc1095f {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.590703] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for the task: (returnval){ [ 1895.590703] env[61663]: value = "task-1690797" [ 1895.590703] env[61663]: _type = "Task" [ 1895.590703] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.598424] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690797, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.692158] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.692348] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1895.692471] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1895.715909] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 04488672-86c4-415b-961e-94641d570112] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.716086] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.716223] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.716351] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.716473] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.716593] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.716710] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.716828] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.716943] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.717077] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1895.717281] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.717398] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.717527] env[61663]: DEBUG nova.network.neutron [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Forcefully refreshing network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1895.717695] env[61663]: DEBUG nova.objects.instance [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lazy-loading 'info_cache' on Instance uuid b583b039-84c7-4168-91a1-82821c0001a3 {{(pid=61663) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1896.010807] env[61663]: DEBUG nova.network.neutron [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updating instance_info_cache with network_info: [{"id": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "address": "fa:16:3e:28:11:24", "network": {"id": "5b5de519-0bdd-4d4c-abc1-d3d48529f56c", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-575763494-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9c969a8a5e264a6fa1dc2539fde01b32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5de89de6-e6", "ovs_interfaceid": "5de89de6-e60d-4821-8c6f-dabdd7e26079", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.020953] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "refresh_cache-b583b039-84c7-4168-91a1-82821c0001a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.021188] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updated the network info_cache for instance {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9990}} [ 1896.101011] env[61663]: DEBUG oslo_vmware.api [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Task: {'id': task-1690797, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085525} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.101285] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1896.101594] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1896.101684] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1896.160949] env[61663]: DEBUG nova.virt.vmwareapi.volumeops [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Volume detach. 
Driver type: vmdk {{(pid=61663) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1896.161308] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17651496-0c5e-4aed-b013-8a71a231602d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.169842] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806b8abd-93bb-4af4-b9ff-652d709efc08 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.198126] env[61663]: ERROR nova.compute.manager [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Failed to detach volume 92db3222-ff59-45f0-9961-25b5e6d6e1ca from /dev/sda: nova.exception.InstanceNotFound: Instance b583b039-84c7-4168-91a1-82821c0001a3 could not be found. [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] Traceback (most recent call last): [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 4133, in _do_rebuild_instance [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self.driver.rebuild(**kwargs) [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/driver.py", line 390, in rebuild [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] raise NotImplementedError() [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] NotImplementedError [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] During handling of the above exception, another exception occurred: [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] Traceback (most recent call last): [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 3556, in _detach_root_volume [ 1896.198126] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self.driver.detach_volume(context, old_connection_info, [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 552, in detach_volume [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] return self._volumeops.detach_volume(connection_info, instance) [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self._detach_volume_vmdk(connection_info, instance) [ 1896.198794] env[61663]: ERROR nova.compute.manager 
[instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] stable_ref.fetch_moref(session) [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] nova.exception.InstanceNotFound: Instance b583b039-84c7-4168-91a1-82821c0001a3 could not be found. [ 1896.198794] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] [ 1896.338819] env[61663]: DEBUG nova.compute.utils [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Build of instance b583b039-84c7-4168-91a1-82821c0001a3 aborted: Failed to rebuild volume backed instance. {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1896.341399] env[61663]: ERROR nova.compute.manager [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance b583b039-84c7-4168-91a1-82821c0001a3 aborted: Failed to rebuild volume backed instance. 
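The headline record above and the traceback that follows it record the same three-step chain: VMwareVCDriver does not override the base driver's rebuild(), so ComputeManager._do_rebuild_instance catches the resulting NotImplementedError and falls back to _rebuild_default_impl; for a volume-backed instance that generic path must detach the root volume before rebuilding, and here the detach fails with InstanceNotFound because the backing VM is already gone, so the rebuild is aborted with BuildAbortException and the instance is put into ERROR. A minimal sketch of that opt-out-and-fallback pattern, with invented names rather than Nova's real signatures:

# Illustrative only: a driver opts out of an optional operation by
# raising NotImplementedError, and the manager falls back to a generic
# implementation whose cleanup step can itself fail and abort the job.

class BuildAbort(Exception):
    pass

class BaseDriver:
    def rebuild(self, instance):
        # Drivers that do not implement rebuild inherit this.
        raise NotImplementedError()

class VMwareLikeDriver(BaseDriver):
    def detach_volume(self, instance):
        # Stand-in for the failing lookup above: the backing VM for
        # this instance no longer exists on the hypervisor.
        raise LookupError(f"Instance {instance} could not be found.")

def do_rebuild(driver, instance):
    try:
        driver.rebuild(instance)            # preferred, driver-specific path
    except NotImplementedError:
        _rebuild_default(driver, instance)  # generic fallback

def _rebuild_default(driver, instance):
    try:
        # A volume-backed instance must shed its root volume first.
        driver.detach_volume(instance)
    except LookupError as exc:
        # Mirrors the log: the failed detach aborts the whole rebuild
        # and the caller sets the instance's vm_state to ERROR.
        raise BuildAbort(f"rebuild aborted: {exc}") from exc

if __name__ == "__main__":
    try:
        do_rebuild(VMwareLikeDriver(), "b583b039-84c7-4168-91a1-82821c0001a3")
    except BuildAbort as exc:
        print(exc)
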
[ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] Traceback (most recent call last): [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 4133, in _do_rebuild_instance [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self.driver.rebuild(**kwargs) [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/driver.py", line 390, in rebuild [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] raise NotImplementedError() [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] NotImplementedError [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] During handling of the above exception, another exception occurred: [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] Traceback (most recent call last): [ 1896.341399] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 3591, in _rebuild_volume_backed_instance [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self._detach_root_volume(context, instance, root_bdm) [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 3570, in _detach_root_volume [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] with excutils.save_and_reraise_exception(): [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self.force_reraise() [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] raise self.value [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 3556, in _detach_root_volume [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self.driver.detach_volume(context, old_connection_info, [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 552, in detach_volume [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] return self._volumeops.detach_volume(connection_info, instance) [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1896.341849] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self._detach_volume_vmdk(connection_info, instance) [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1135, in get_vm_ref [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] stable_ref.fetch_moref(session) [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1126, in fetch_moref [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] nova.exception.InstanceNotFound: Instance b583b039-84c7-4168-91a1-82821c0001a3 could not be found. [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] During handling of the above exception, another exception occurred: [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] Traceback (most recent call last): [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 10856, in _error_out_instance_on_exception [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] yield [ 1896.342258] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 3859, in rebuild_instance [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self._do_rebuild_instance_with_claim( [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 3945, in _do_rebuild_instance_with_claim [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self._do_rebuild_instance( [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 4137, in _do_rebuild_instance [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] self._rebuild_default_impl(**kwargs) [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 3714, in _rebuild_default_impl [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] 
self._rebuild_volume_backed_instance( [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] File "/opt/stack/nova/nova/compute/manager.py", line 3606, in _rebuild_volume_backed_instance [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] raise exception.BuildAbortException( [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] nova.exception.BuildAbortException: Build of instance b583b039-84c7-4168-91a1-82821c0001a3 aborted: Failed to rebuild volume backed instance. [ 1896.342672] env[61663]: ERROR nova.compute.manager [instance: b583b039-84c7-4168-91a1-82821c0001a3] [ 1896.430089] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.430370] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.630137] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1178ec2e-7d66-49c9-98cf-974089fa01ec {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.637253] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f2b225-5ea9-4034-8e60-256f645c6ed0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.665868] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f981740-9fa3-48f6-a184-62372797d85e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.672513] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e4388c-0b88-41e7-8f16-c31049bf00a5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.684885] env[61663]: DEBUG nova.compute.provider_tree [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1896.693426] env[61663]: DEBUG nova.scheduler.client.report [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1896.710043] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.280s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.710242] env[61663]: INFO nova.compute.manager [None req-4271b552-8638-417a-9616-eac098f7658c tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Successfully reverted task state from rebuilding on failure for instance. [ 1896.965042] env[61663]: DEBUG oslo_concurrency.lockutils [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Acquiring lock "b583b039-84c7-4168-91a1-82821c0001a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.965042] env[61663]: DEBUG oslo_concurrency.lockutils [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "b583b039-84c7-4168-91a1-82821c0001a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.965042] env[61663]: DEBUG oslo_concurrency.lockutils [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Acquiring lock "b583b039-84c7-4168-91a1-82821c0001a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.965264] env[61663]: DEBUG oslo_concurrency.lockutils [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "b583b039-84c7-4168-91a1-82821c0001a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.965264] env[61663]: DEBUG oslo_concurrency.lockutils [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "b583b039-84c7-4168-91a1-82821c0001a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.967126] env[61663]: INFO nova.compute.manager [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 
tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Terminating instance [ 1896.969128] env[61663]: DEBUG nova.compute.manager [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1896.969670] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07351fbd-d68c-406e-942e-881302fa7361 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.982781] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e56ba54-1c72-4fc6-8cf4-6fd41a57799f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.010759] env[61663]: WARNING nova.virt.vmwareapi.driver [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance b583b039-84c7-4168-91a1-82821c0001a3 could not be found. [ 1897.011011] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1897.011344] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-981da606-8f8b-4219-b458-427a8f762388 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.016077] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.019382] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db6efd1-fd9a-4d3c-9b24-f3e8e8b84081 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.047719] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b583b039-84c7-4168-91a1-82821c0001a3 could not be found.
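Both WARNING records here show the delete path being deliberately idempotent: when the hypervisor has already lost the VM, the InstanceNotFound from the lookup is logged and cleanup continues, so terminating a half-deleted instance converges to "gone" instead of failing. A hedged sketch of that shape; the backend object and its method names are invented for illustration:

import logging

logging.basicConfig(level=logging.INFO)
LOG = logging.getLogger("destroy-sketch")

class InstanceNotFound(Exception):
    pass

class AlreadyGoneBackend:
    """Stand-in hypervisor API whose VM has already disappeared."""
    def lookup(self, uuid):
        raise InstanceNotFound(uuid)
    def delete_vm(self, ref):
        pass
    def deallocate_network(self, uuid):
        LOG.info("deallocated network for %s", uuid)
    def delete_volumes(self, uuid):
        LOG.info("deleted volumes for %s", uuid)

def destroy(backend, uuid):
    try:
        ref = backend.lookup(uuid)
    except InstanceNotFound:
        # As in the log: warn and keep going rather than failing the
        # terminate, so ports and volumes are not leaked.
        LOG.warning("Instance %s does not exist on backend; "
                    "proceeding with cleanup.", uuid)
    else:
        backend.delete_vm(ref)
    # Cleanup runs either way.
    backend.deallocate_network(uuid)
    backend.delete_volumes(uuid)

destroy(AlreadyGoneBackend(), "b583b039-84c7-4168-91a1-82821c0001a3")
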
[ 1897.047908] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1897.048107] env[61663]: INFO nova.compute.manager [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1897.048350] env[61663]: DEBUG oslo.service.loopingcall [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1897.048556] env[61663]: DEBUG nova.compute.manager [-] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1897.048651] env[61663]: DEBUG nova.network.neutron [-] [instance: b583b039-84c7-4168-91a1-82821c0001a3] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1897.695015] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.708354] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.708586] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.708793] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.709017] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1897.710144] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f1bdbb-17c7-4652-8e34-b1be7e09bf10 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.718783] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c004dc-906f-4faf-a996-3217f902261e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.732972] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6501ad41-7a8c-484e-9a0f-669f6a1a04d4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.739065] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82cb54e-f9d4-46ca-8777-8c7666626a6c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.769593] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181327MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1897.769788] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1897.770029] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1897.859133] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 04488672-86c4-415b-961e-94641d570112 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.859303] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.859433] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.859558] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.859681] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.859879] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.860016] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.860202] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.860254] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.860373] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1897.872256] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b1eece3b-003c-46ea-944d-ccac01ca4ba9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1897.888751] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 56dc70b4-ebff-42c5-bbdc-bf7ca4a7c73d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1897.903510] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 440175fc-da0c-4ea3-9a74-46e97e32658b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1897.915974] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1897.926791] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance d62bc172-a64b-481e-a2fa-55ad4ccf73f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1897.937976] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 3444cec9-7da9-47d9-b669-cd1b4261e9d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1897.948895] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 59d4580f-5897-42d6-82cb-0aead4d2658c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1897.959081] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1897.969578] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1897.972020] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1897.972020] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1898.201891] env[61663]: DEBUG nova.network.neutron [-] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.215230] env[61663]: INFO nova.compute.manager [-] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Took 1.17 seconds to deallocate network for instance. [ 1898.219945] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5326cd-3fdc-48f6-a217-12dc53ad2664 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.225790] env[61663]: DEBUG nova.compute.manager [req-8ce2a1b8-d86a-4ebc-b2fe-0e48bed9c845 req-01c64d42-f5d0-42c7-a51a-e3a5153771c4 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Received event network-vif-deleted-5de89de6-e60d-4821-8c6f-dabdd7e26079 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1898.226085] env[61663]: INFO nova.compute.manager [req-8ce2a1b8-d86a-4ebc-b2fe-0e48bed9c845 req-01c64d42-f5d0-42c7-a51a-e3a5153771c4 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Neutron deleted interface 5de89de6-e60d-4821-8c6f-dabdd7e26079; detaching it from the instance and deleting it from the info cache [ 1898.226322] env[61663]: DEBUG nova.network.neutron [req-8ce2a1b8-d86a-4ebc-b2fe-0e48bed9c845 req-01c64d42-f5d0-42c7-a51a-e3a5153771c4 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.234159] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6622fb-7492-4c8d-bbf1-6a22d9390861 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.237513] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b69d4ac-e115-4164-b47d-58577b94e8a0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.267202] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff08e140-d67b-4cd8-bba7-2a957dcd6bf0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.275705] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c24ab8e-6071-4547-a18e-fe6dfe06d146 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.291895] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453b84af-8523-46dd-aa92-e24b6c074df8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.306346] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.316548] env[61663]: DEBUG nova.compute.manager [req-8ce2a1b8-d86a-4ebc-b2fe-0e48bed9c845 req-01c64d42-f5d0-42c7-a51a-e3a5153771c4 service nova] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Detach interface failed, port_id=5de89de6-e60d-4821-8c6f-dabdd7e26079, reason: Instance b583b039-84c7-4168-91a1-82821c0001a3 could not be found. {{(pid=61663) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10956}} [ 1898.318191] env[61663]: INFO nova.compute.manager [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Took 0.10 seconds to detach 1 volumes for instance. [ 1898.321242] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1898.327175] env[61663]: DEBUG nova.compute.manager [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Deleting volume: 92db3222-ff59-45f0-9961-25b5e6d6e1ca {{(pid=61663) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3238}} [ 1898.336276] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1898.336480] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.566s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.415877] env[61663]: DEBUG oslo_concurrency.lockutils [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.415877] env[61663]: DEBUG 
oslo_concurrency.lockutils [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.415877] env[61663]: DEBUG nova.objects.instance [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lazy-loading 'resources' on Instance uuid b583b039-84c7-4168-91a1-82821c0001a3 {{(pid=61663) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1898.699641] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dac41be-3d07-4374-8101-05bf06273798 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.708609] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ec604c-9c99-4cb0-8d4c-532474aa6d68 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.746316] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff753bdc-1aea-49d9-92f8-9b1789931f72 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.753851] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a08b4a-39a6-4ff7-a1e9-63481305a730 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.768771] env[61663]: DEBUG nova.compute.provider_tree [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1898.779340] env[61663]: DEBUG nova.scheduler.client.report [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1898.794409] env[61663]: DEBUG oslo_concurrency.lockutils [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.379s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.853828] env[61663]: DEBUG oslo_concurrency.lockutils [None req-82a16ffb-2871-4d18-8383-54dbada7d2b1 
tempest-ServerActionsV293TestJSON-569664278 tempest-ServerActionsV293TestJSON-569664278-project-member] Lock "b583b039-84c7-4168-91a1-82821c0001a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.889s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.336125] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1900.479201] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "740f7887-4a5c-4889-9635-e9d9c6607ee7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.480624] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.692141] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1901.692438] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1903.692445] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1903.692445] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
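The periodic-task records above all come from the same scheduler shape: the compute manager registers named tasks, a looping scheduler runs each one on its interval, and a task may no-op itself based on configuration, exactly as _reclaim_queued_deletes does when reclaim_instance_interval <= 0. A toy sketch of that pattern; the scheduler and config names here are invented, not oslo.service's real API:

import time

CONF = {"reclaim_instance_interval": 0}  # the config value gating the task

def poll_volume_usage():
    print("polling volume usage...")

def reclaim_queued_deletes():
    # Config-gated no-op, as in the log record above.
    if CONF["reclaim_instance_interval"] <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    print("reclaiming soft-deleted instances...")

def run_periodic_tasks(tasks, ticks=2, spacing=0.1):
    """Toy scheduler: run every registered task once per tick."""
    for _ in range(ticks):
        for task in tasks:
            print(f"Running periodic task {task.__name__}")
            task()
        time.sleep(spacing)

run_periodic_tasks([poll_volume_usage, reclaim_queued_deletes])
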
[ 1904.775096] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6a07f348-dfc4-476f-8ba4-5234b0855129 tempest-ServerMetadataNegativeTestJSON-1947257258 tempest-ServerMetadataNegativeTestJSON-1947257258-project-member] Acquiring lock "44a9bf55-1c16-49aa-a61f-611696fb2c54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.775096] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6a07f348-dfc4-476f-8ba4-5234b0855129 tempest-ServerMetadataNegativeTestJSON-1947257258 tempest-ServerMetadataNegativeTestJSON-1947257258-project-member] Lock "44a9bf55-1c16-49aa-a61f-611696fb2c54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.688778] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1916.589400] env[61663]: WARNING oslo_vmware.rw_handles [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1916.589400] env[61663]: ERROR oslo_vmware.rw_handles [ 1916.590059] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/41880d64-d760-492c-8658-66df57a9bc43/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1916.591794] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b
tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1916.592057] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Copying Virtual Disk [datastore1] vmware_temp/41880d64-d760-492c-8658-66df57a9bc43/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/41880d64-d760-492c-8658-66df57a9bc43/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1916.592334] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06e0d54c-d2a0-443b-a474-41f2885155c6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.600601] env[61663]: DEBUG oslo_vmware.api [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Waiting for the task: (returnval){ [ 1916.600601] env[61663]: value = "task-1690799" [ 1916.600601] env[61663]: _type = "Task" [ 1916.600601] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.608665] env[61663]: DEBUG oslo_vmware.api [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Task: {'id': task-1690799, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.110815] env[61663]: DEBUG oslo_vmware.exceptions [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1917.111148] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.111754] env[61663]: ERROR nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1917.111754] env[61663]: Faults: ['InvalidArgument'] [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] Traceback (most recent call last): [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] yield resources [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] self.driver.spawn(context, instance, image_meta, [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] self._fetch_image_if_missing(context, vi) [ 1917.111754] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] image_cache(vi, tmp_image_ds_loc) [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] vm_util.copy_virtual_disk( [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] session._wait_for_task(vmdk_copy_task) [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] return self.wait_for_task(task_ref) [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] return evt.wait() [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] result = hub.switch() [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1917.112143] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] return self.greenlet.switch() [ 1917.112514] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1917.112514] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] self.f(*self.args, **self.kw) [ 1917.112514] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1917.112514] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] raise exceptions.translate_fault(task_info.error) [ 1917.112514] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1917.112514] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] Faults: ['InvalidArgument'] [ 1917.112514] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] [ 1917.112514] env[61663]: INFO nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Terminating instance [ 1917.113668] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.113868] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1917.114113] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c579b0a8-27e1-496f-bd62-512a802c7cc0 
{{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.116533] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1917.116722] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1917.117437] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686b7ef3-ba04-4934-943f-daa74df330ed {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.123992] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1917.124221] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-596563e7-b6e7-47f7-92e6-58534bfc34f2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.126385] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1917.126560] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1917.127481] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-099f29e1-493b-4137-8683-cf4a21b34f76 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.132503] env[61663]: DEBUG oslo_vmware.api [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Waiting for the task: (returnval){ [ 1917.132503] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52062519-fef1-86e2-b075-ced1cad3fa5d" [ 1917.132503] env[61663]: _type = "Task" [ 1917.132503] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.139032] env[61663]: DEBUG oslo_vmware.api [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52062519-fef1-86e2-b075-ced1cad3fa5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.192125] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1917.192353] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1917.192537] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Deleting the datastore file [datastore1] 04488672-86c4-415b-961e-94641d570112 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1917.192793] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e6c58bb-aad6-4faa-8c5d-da3291c2c2c0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.199294] env[61663]: DEBUG oslo_vmware.api [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Waiting for the task: (returnval){ [ 1917.199294] env[61663]: value = "task-1690801" [ 1917.199294] env[61663]: _type = "Task" [ 1917.199294] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.207605] env[61663]: DEBUG oslo_vmware.api [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Task: {'id': task-1690801, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.643070] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1917.643370] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Creating directory with path [datastore1] vmware_temp/de8b6b8c-28d5-486d-9de9-5e32cac7fdc1/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1917.643562] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9917f46-6cdd-4478-a8c3-ade9753cddf9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.654883] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Created directory with path [datastore1] vmware_temp/de8b6b8c-28d5-486d-9de9-5e32cac7fdc1/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1917.655094] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Fetch image to [datastore1] vmware_temp/de8b6b8c-28d5-486d-9de9-5e32cac7fdc1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1917.655321] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/de8b6b8c-28d5-486d-9de9-5e32cac7fdc1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1917.656101] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7f4604-43ed-48d8-be81-b2a36d6ab57b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.662869] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f554ed-eb68-483c-a581-361132999d04 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.671825] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3933c7b-f818-4834-911d-d70069342bf0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.706356] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5dd2a933-1694-4bdb-a429-2a5c7c839e67 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.712932] env[61663]: DEBUG oslo_vmware.api [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Task: {'id': task-1690801, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072994} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.714434] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1917.714633] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1917.714808] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1917.714981] env[61663]: INFO nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Took 0.60 seconds to destroy the instance on the hypervisor. 
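The failed spawn above is surfaced by oslo.vmware's task polling: the session polls the vSphere TaskInfo until it leaves the queued/running states, and an 'error' state is translated into a VimFaultException that carries the fault list (here ['InvalidArgument'] for the bad fileType passed to CopyVirtualDisk_Task). A minimal sketch of that polling pattern follows, assuming a pyVmomi-style task object exposing info.state and info.error; poll_task and VimTaskError are illustrative names, not the oslo.vmware API.

import time

class VimTaskError(Exception):
    # Stand-in for oslo_vmware.exceptions.VimFaultException: carries the
    # server-side message plus the list of fault names.
    def __init__(self, msg, fault_list):
        super().__init__(msg)
        self.fault_list = fault_list

def poll_task(task, interval=0.5):
    # TaskInfo.state is one of: queued, running, success, error.
    while True:
        info = task.info
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            # e.g. msg='A specified parameter was not correct: fileType',
            # fault_list=['InvalidArgument'], as in the traceback above.
            raise VimTaskError(info.error.localizedMessage,
                               [type(info.error.fault).__name__])
        time.sleep(interval)  # still queued/running: poll again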
[ 1917.716754] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e4a10394-c51f-4242-8e57-af3739c7fc50 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.718592] env[61663]: DEBUG nova.compute.claims [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1917.718791] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.719019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.743084] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1917.922951] env[61663]: DEBUG oslo_vmware.rw_handles [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de8b6b8c-28d5-486d-9de9-5e32cac7fdc1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1917.984168] env[61663]: DEBUG oslo_vmware.rw_handles [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1917.984358] env[61663]: DEBUG oslo_vmware.rw_handles [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de8b6b8c-28d5-486d-9de9-5e32cac7fdc1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1918.035155] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c26b4a-e590-45ff-8a00-e3b70552f038 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.043041] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2323200-da95-4e18-9471-4b96301bb778 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.073054] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0a89e8-7d4e-47c1-9239-92e8e912ddc3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.079563] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a51e174-7d4d-4a50-b871-19274cc06099 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.091895] env[61663]: DEBUG nova.compute.provider_tree [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.101747] env[61663]: DEBUG nova.scheduler.client.report [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1918.117069] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.398s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.117582] env[61663]: ERROR nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1918.117582] env[61663]: Faults: ['InvalidArgument'] [ 1918.117582] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] Traceback (most recent call last): [ 1918.117582] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1918.117582] env[61663]: 
ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] self.driver.spawn(context, instance, image_meta, [ 1918.117582] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1918.117582] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1918.117582] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1918.117582] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] self._fetch_image_if_missing(context, vi) [ 1918.117582] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1918.117582] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] image_cache(vi, tmp_image_ds_loc) [ 1918.117582] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] vm_util.copy_virtual_disk( [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] session._wait_for_task(vmdk_copy_task) [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] return self.wait_for_task(task_ref) [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] return evt.wait() [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] result = hub.switch() [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] return self.greenlet.switch() [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1918.117977] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] self.f(*self.args, **self.kw) [ 1918.118403] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1918.118403] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] raise exceptions.translate_fault(task_info.error) [ 1918.118403] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1918.118403] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] Faults: ['InvalidArgument'] [ 1918.118403] env[61663]: ERROR nova.compute.manager [instance: 04488672-86c4-415b-961e-94641d570112] [ 1918.118403] env[61663]: DEBUG nova.compute.utils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1918.119643] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Build of instance 04488672-86c4-415b-961e-94641d570112 was re-scheduled: A specified parameter was not correct: fileType [ 1918.119643] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1918.120033] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1918.120210] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1918.120397] env[61663]: DEBUG nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1918.120565] env[61663]: DEBUG nova.network.neutron [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1918.757527] env[61663]: DEBUG nova.network.neutron [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.774805] env[61663]: INFO nova.compute.manager [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Took 0.65 seconds to deallocate network for instance. [ 1918.956113] env[61663]: INFO nova.scheduler.client.report [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Deleted allocations for instance 04488672-86c4-415b-961e-94641d570112 [ 1918.982496] env[61663]: DEBUG oslo_concurrency.lockutils [None req-99d4e6f2-45a7-42c8-b90b-db70b4b99b2b tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "04488672-86c4-415b-961e-94641d570112" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 659.055s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.984077] env[61663]: DEBUG oslo_concurrency.lockutils [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "04488672-86c4-415b-961e-94641d570112" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 461.076s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.984314] env[61663]: DEBUG oslo_concurrency.lockutils [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Acquiring lock "04488672-86c4-415b-961e-94641d570112-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.984528] env[61663]: DEBUG oslo_concurrency.lockutils [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "04488672-86c4-415b-961e-94641d570112-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" ::
waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.984699] env[61663]: DEBUG oslo_concurrency.lockutils [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "04488672-86c4-415b-961e-94641d570112-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.987644] env[61663]: INFO nova.compute.manager [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Terminating instance [ 1918.989264] env[61663]: DEBUG nova.compute.manager [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1918.989470] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1918.990783] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0606dc82-d23c-4ace-8d80-68d1afdbc9fe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.001095] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3254c7fa-752d-42a4-a6df-aa538ddee6b9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.011252] env[61663]: DEBUG nova.compute.manager [None req-64ed4f58-b531-489e-b926-8348f547cd97 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: a0399e6e-6b1a-4702-870d-d9644c3d6545] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1919.031963] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 04488672-86c4-415b-961e-94641d570112 could not be found. [ 1919.032297] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1919.032484] env[61663]: INFO nova.compute.manager [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] [instance: 04488672-86c4-415b-961e-94641d570112] Took 0.04 seconds to destroy the instance on the hypervisor.
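The terminate at 1919.031963 above shows the destroy path tolerating a VM that is already gone from the backend: InstanceNotFound is downgraded to a warning and teardown continues, so a user-facing delete stays idempotent even when an earlier cleanup already removed the VM. A minimal sketch of that behaviour, assuming duck-typed driver and network_api objects; destroy_instance and the exception stub are illustrative, not Nova's actual code.

import logging

LOG = logging.getLogger(__name__)

class InstanceNotFound(Exception):
    # Stand-in for nova.exception.InstanceNotFound.
    pass

def destroy_instance(driver, network_api, instance):
    try:
        driver.destroy(instance)  # unregister the VM, delete datastore files
    except InstanceNotFound:
        # The backend already lost the VM; warn and keep going so the
        # delete still succeeds for the user.
        LOG.warning("Instance does not exist on backend: %s", instance)
    # Teardown continues either way, mirroring the deallocate_for_instance()
    # records that follow the warning above.
    network_api.deallocate_for_instance(instance)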
[ 1919.032913] env[61663]: DEBUG oslo.service.loopingcall [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1919.033119] env[61663]: DEBUG nova.compute.manager [-] [instance: 04488672-86c4-415b-961e-94641d570112] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1919.033241] env[61663]: DEBUG nova.network.neutron [-] [instance: 04488672-86c4-415b-961e-94641d570112] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1919.035967] env[61663]: DEBUG nova.compute.manager [None req-64ed4f58-b531-489e-b926-8348f547cd97 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: a0399e6e-6b1a-4702-870d-d9644c3d6545] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1919.061824] env[61663]: DEBUG nova.network.neutron [-] [instance: 04488672-86c4-415b-961e-94641d570112] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.069887] env[61663]: INFO nova.compute.manager [-] [instance: 04488672-86c4-415b-961e-94641d570112] Took 0.04 seconds to deallocate network for instance. [ 1919.071013] env[61663]: DEBUG oslo_concurrency.lockutils [None req-64ed4f58-b531-489e-b926-8348f547cd97 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "a0399e6e-6b1a-4702-870d-d9644c3d6545" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 230.861s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.081548] env[61663]: DEBUG nova.compute.manager [None req-c0ddae20-0dff-4002-8703-f2927a609081 tempest-ServersV294TestFqdnHostnames-638770955 tempest-ServersV294TestFqdnHostnames-638770955-project-member] [instance: 6d0f9509-1e63-4da8-a92b-9393a7cb4dff] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1919.104940] env[61663]: DEBUG nova.compute.manager [None req-c0ddae20-0dff-4002-8703-f2927a609081 tempest-ServersV294TestFqdnHostnames-638770955 tempest-ServersV294TestFqdnHostnames-638770955-project-member] [instance: 6d0f9509-1e63-4da8-a92b-9393a7cb4dff] Instance disappeared before build.
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1919.125928] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c0ddae20-0dff-4002-8703-f2927a609081 tempest-ServersV294TestFqdnHostnames-638770955 tempest-ServersV294TestFqdnHostnames-638770955-project-member] Lock "6d0f9509-1e63-4da8-a92b-9393a7cb4dff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 224.184s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.137637] env[61663]: DEBUG nova.compute.manager [None req-353b30da-1798-4590-baf4-75a85e3180b2 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b1eece3b-003c-46ea-944d-ccac01ca4ba9] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1919.164603] env[61663]: DEBUG nova.compute.manager [None req-353b30da-1798-4590-baf4-75a85e3180b2 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b1eece3b-003c-46ea-944d-ccac01ca4ba9] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1919.170792] env[61663]: DEBUG oslo_concurrency.lockutils [None req-97f131d4-3dc2-436b-8cd5-ffe2c9a1d660 tempest-ImagesOneServerTestJSON-670042136 tempest-ImagesOneServerTestJSON-670042136-project-member] Lock "04488672-86c4-415b-961e-94641d570112" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.187s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.171629] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "04488672-86c4-415b-961e-94641d570112" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 130.086s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.171836] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 04488672-86c4-415b-961e-94641d570112] During sync_power_state the instance has a pending task (deleting). Skip. [ 1919.172052] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "04488672-86c4-415b-961e-94641d570112" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.183344] env[61663]: DEBUG oslo_concurrency.lockutils [None req-353b30da-1798-4590-baf4-75a85e3180b2 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "b1eece3b-003c-46ea-944d-ccac01ca4ba9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 216.210s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.191155] env[61663]: DEBUG nova.compute.manager [None req-097ed397-2179-4bad-8690-4952b82f6804 tempest-ServerActionsTestOtherA-984726438 tempest-ServerActionsTestOtherA-984726438-project-member] [instance: 56dc70b4-ebff-42c5-bbdc-bf7ca4a7c73d] Starting instance...
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1919.214979] env[61663]: DEBUG nova.compute.manager [None req-097ed397-2179-4bad-8690-4952b82f6804 tempest-ServerActionsTestOtherA-984726438 tempest-ServerActionsTestOtherA-984726438-project-member] [instance: 56dc70b4-ebff-42c5-bbdc-bf7ca4a7c73d] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1919.235490] env[61663]: DEBUG oslo_concurrency.lockutils [None req-097ed397-2179-4bad-8690-4952b82f6804 tempest-ServerActionsTestOtherA-984726438 tempest-ServerActionsTestOtherA-984726438-project-member] Lock "56dc70b4-ebff-42c5-bbdc-bf7ca4a7c73d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 207.373s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.244443] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1919.294558] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1919.294814] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.296692] env[61663]: INFO nova.compute.claims [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1919.534011] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58677f33-628e-44b8-9c33-65ab108b8769 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.541744] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-827b8421-6046-4b0c-af6b-102792228c3e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.571767] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4162db43-46da-489e-9945-ff307f44680b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.578651] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e3d7da-7372-4239-b550-6beff4a7fa11 {{(pid=61663) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.591363] env[61663]: DEBUG nova.compute.provider_tree [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1919.599919] env[61663]: DEBUG nova.scheduler.client.report [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1919.612992] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.318s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.613472] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1919.646183] env[61663]: DEBUG nova.compute.utils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1919.647545] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1919.647710] env[61663]: DEBUG nova.network.neutron [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1919.655663] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Start building block device mappings for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1919.708528] env[61663]: DEBUG nova.policy [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '670d4550ac924a3787cbf6dfe1bf36b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba20bc7d580b4d7fa87a7b2fcc0c51d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 1919.714814] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1919.739737] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1919.739977] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1919.740150] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1919.740335] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1919.740482] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1919.740694] env[61663]: DEBUG 
nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1919.740910] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1919.741086] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1919.741261] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1919.741437] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1919.741648] env[61663]: DEBUG nova.virt.hardware [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1919.742490] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8869721-9431-4fe5-973e-ead059bf05f3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.750381] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2f9b56-71a5-4a2b-8337-fbaa5bb509c8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.199789] env[61663]: DEBUG nova.network.neutron [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Successfully created port: 8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1920.894702] env[61663]: DEBUG nova.network.neutron [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Successfully updated port: 8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1920.906183] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "refresh_cache-440175fc-da0c-4ea3-9a74-46e97e32658b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1920.906338] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquired lock "refresh_cache-440175fc-da0c-4ea3-9a74-46e97e32658b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1920.906487] env[61663]: DEBUG nova.network.neutron [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1920.946650] env[61663]: DEBUG nova.network.neutron [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1920.970694] env[61663]: DEBUG nova.compute.manager [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Received event network-vif-plugged-8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1920.970959] env[61663]: DEBUG oslo_concurrency.lockutils [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] Acquiring lock "440175fc-da0c-4ea3-9a74-46e97e32658b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.971217] env[61663]: DEBUG oslo_concurrency.lockutils [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] Lock "440175fc-da0c-4ea3-9a74-46e97e32658b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.971395] env[61663]: DEBUG oslo_concurrency.lockutils [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] Lock "440175fc-da0c-4ea3-9a74-46e97e32658b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.971574] env[61663]: DEBUG nova.compute.manager [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] No waiting events found dispatching network-vif-plugged-8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1920.971729] env[61663]: WARNING nova.compute.manager [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova]
[instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Received unexpected event network-vif-plugged-8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52 for instance with vm_state building and task_state spawning. [ 1920.971889] env[61663]: DEBUG nova.compute.manager [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Received event network-changed-8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1920.972096] env[61663]: DEBUG nova.compute.manager [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Refreshing instance network info cache due to event network-changed-8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1920.972220] env[61663]: DEBUG oslo_concurrency.lockutils [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] Acquiring lock "refresh_cache-440175fc-da0c-4ea3-9a74-46e97e32658b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.242800] env[61663]: DEBUG nova.network.neutron [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Updating instance_info_cache with network_info: [{"id": "8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52", "address": "fa:16:3e:7a:a2:3f", "network": {"id": "4881720d-78ec-4fa5-ac05-915e1d68e0e0", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-829948714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba20bc7d580b4d7fa87a7b2fcc0c51d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bde23e5-b7", "ovs_interfaceid": "8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.259948] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Releasing lock "refresh_cache-440175fc-da0c-4ea3-9a74-46e97e32658b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.260438] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Instance network_info: |[{"id": "8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52", 
"address": "fa:16:3e:7a:a2:3f", "network": {"id": "4881720d-78ec-4fa5-ac05-915e1d68e0e0", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-829948714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba20bc7d580b4d7fa87a7b2fcc0c51d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bde23e5-b7", "ovs_interfaceid": "8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1921.260633] env[61663]: DEBUG oslo_concurrency.lockutils [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] Acquired lock "refresh_cache-440175fc-da0c-4ea3-9a74-46e97e32658b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.260845] env[61663]: DEBUG nova.network.neutron [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Refreshing network info cache for port 8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1921.262015] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:a2:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8145bd31-c4a7-4828-8818-d065010c9565', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1921.269513] env[61663]: DEBUG oslo.service.loopingcall [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1921.270430] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1921.273112] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abfc71f8-c65d-441e-9d0e-84b0dd29c6ab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.292914] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1921.292914] env[61663]: value = "task-1690802" [ 1921.292914] env[61663]: _type = "Task" [ 1921.292914] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.300455] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690802, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.703438] env[61663]: DEBUG nova.network.neutron [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Updated VIF entry in instance network info cache for port 8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1921.703819] env[61663]: DEBUG nova.network.neutron [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Updating instance_info_cache with network_info: [{"id": "8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52", "address": "fa:16:3e:7a:a2:3f", "network": {"id": "4881720d-78ec-4fa5-ac05-915e1d68e0e0", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-829948714-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba20bc7d580b4d7fa87a7b2fcc0c51d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bde23e5-b7", "ovs_interfaceid": "8bde23e5-b7a7-4b0c-bd7b-6be3ef92cb52", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.713178] env[61663]: DEBUG oslo_concurrency.lockutils [req-1f17e706-8fcf-424e-8f14-572b2d326db4 req-6e34d35b-4a46-4d04-a33c-30969782030e service nova] Releasing lock "refresh_cache-440175fc-da0c-4ea3-9a74-46e97e32658b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.802725] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690802, 'name': CreateVM_Task, 'duration_secs': 0.277523} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.802910] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1921.803535] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.803713] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.804044] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1921.804290] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75908358-4f44-410e-9e1c-3585168f90fa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.808849] env[61663]: DEBUG oslo_vmware.api [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for the task: (returnval){ [ 1921.808849] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529bc7c0-5721-192f-a8e2-c6b12b26fadc" [ 1921.808849] env[61663]: _type = "Task" [ 1921.808849] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.816053] env[61663]: DEBUG oslo_vmware.api [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529bc7c0-5721-192f-a8e2-c6b12b26fadc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.319484] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.319884] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1922.319931] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.614867] env[61663]: DEBUG oslo_concurrency.lockutils [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "440175fc-da0c-4ea3-9a74-46e97e32658b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.012594] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.012968] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.692621] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1954.692933] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.688291] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time 
{{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.691921] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1957.692111] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1957.692241] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1957.715938] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.716099] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.716229] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.716355] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.716478] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.716600] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.716721] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.716957] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.717111] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.717233] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1957.717352] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1959.692886] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1959.705351] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.705717] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.705717] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.705850] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1959.707031] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c8e4db-eaf0-473d-a734-d1c916560f76 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.716671] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99886d2a-6295-459d-84ab-d6c9ea416208 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.731951] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e11d493-0eea-4fbc-bff8-d0ff8324fed5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.738412] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a284087e-a46c-4130-9073-c608887c64ef {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.766616] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181323MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1959.766768] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.766945] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.840042] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.840168] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 1305216b-0ee5-499a-a82a-30b45a8c832c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.840211] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.840332] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.840474] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.840604] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.840724] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.840839] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.840955] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.841079] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 440175fc-da0c-4ea3-9a74-46e97e32658b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1959.851398] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1959.861423] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance d62bc172-a64b-481e-a2fa-55ad4ccf73f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1959.870826] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 3444cec9-7da9-47d9-b669-cd1b4261e9d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1959.880778] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 59d4580f-5897-42d6-82cb-0aead4d2658c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1959.889755] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1959.899305] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1959.908587] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1959.917459] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 44a9bf55-1c16-49aa-a61f-611696fb2c54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1959.926728] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1959.926957] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1959.927119] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1960.128961] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df39cc0-c2be-4de1-a40f-ad0275f5612d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.136128] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7dc35ee-3492-4a86-977e-c272868bea87 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.166489] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd740c1-d161-4aa3-8000-9c8162187d3d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.173529] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143dabef-cd8a-4478-bd0e-3c6fec61e08a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.186215] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1960.195592] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1960.208527] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1960.208709] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.442s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1961.208223] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1961.692495] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1961.692746] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1964.692794] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1964.693188] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1965.701861] env[61663]: WARNING oslo_vmware.rw_handles [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1965.701861] env[61663]: ERROR oslo_vmware.rw_handles [ 1965.702591] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/de8b6b8c-28d5-486d-9de9-5e32cac7fdc1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1965.704433] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d 
tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1965.704712] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Copying Virtual Disk [datastore1] vmware_temp/de8b6b8c-28d5-486d-9de9-5e32cac7fdc1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/de8b6b8c-28d5-486d-9de9-5e32cac7fdc1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1965.705036] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7d0dea74-369d-4e13-be98-3626491a3f2b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.713091] env[61663]: DEBUG oslo_vmware.api [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Waiting for the task: (returnval){ [ 1965.713091] env[61663]: value = "task-1690803" [ 1965.713091] env[61663]: _type = "Task" [ 1965.713091] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.720379] env[61663]: DEBUG oslo_vmware.api [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Task: {'id': task-1690803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.224222] env[61663]: DEBUG oslo_vmware.exceptions [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1966.224495] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.225032] env[61663]: ERROR nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1966.225032] env[61663]: Faults: ['InvalidArgument'] [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Traceback (most recent call last): [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] yield resources [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] self.driver.spawn(context, instance, image_meta, [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] self._fetch_image_if_missing(context, vi) [ 1966.225032] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] image_cache(vi, tmp_image_ds_loc) [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] vm_util.copy_virtual_disk( [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] session._wait_for_task(vmdk_copy_task) [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] return self.wait_for_task(task_ref) [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] return evt.wait() [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] result = hub.switch() [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1966.225530] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] return self.greenlet.switch() [ 1966.225910] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1966.225910] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] self.f(*self.args, **self.kw) [ 1966.225910] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1966.225910] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] raise exceptions.translate_fault(task_info.error) [ 1966.225910] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1966.225910] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Faults: ['InvalidArgument'] [ 1966.225910] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] [ 1966.225910] env[61663]: INFO nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Terminating instance [ 1966.226879] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.227100] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1966.227337] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae05f0e8-7d17-4545-818a-58fbbcb5c581 
{{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.230379] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1966.230577] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1966.231290] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d43e06-7444-4efd-a685-00a3fd33b649 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.237723] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1966.237924] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0909f5c8-1b87-4cab-b0af-85a0979f9410 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.239925] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1966.240144] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1966.241067] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47b95992-5065-4b67-b03a-4b34e2c1da12 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.245693] env[61663]: DEBUG oslo_vmware.api [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for the task: (returnval){ [ 1966.245693] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528ae12e-5c19-3390-b5ac-dd1afa7f345f" [ 1966.245693] env[61663]: _type = "Task" [ 1966.245693] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1966.252473] env[61663]: DEBUG oslo_vmware.api [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528ae12e-5c19-3390-b5ac-dd1afa7f345f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1966.303965] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1966.304318] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1966.304540] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Deleting the datastore file [datastore1] 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1966.304812] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dab9c173-f15d-4f15-9bd4-4525ae5bf943 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1966.310829] env[61663]: DEBUG oslo_vmware.api [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Waiting for the task: (returnval){
[ 1966.310829] env[61663]: value = "task-1690805"
[ 1966.310829] env[61663]: _type = "Task"
[ 1966.310829] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1966.318014] env[61663]: DEBUG oslo_vmware.api [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Task: {'id': task-1690805, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1966.755579] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1966.755867] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Creating directory with path [datastore1] vmware_temp/23da5ad3-a2c4-4c1c-a8da-4da6352b7b93/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1966.756087] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77025ec7-c97f-4528-9cea-ede66888db70 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1966.768326] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Created directory with path [datastore1] vmware_temp/23da5ad3-a2c4-4c1c-a8da-4da6352b7b93/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1966.768511] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Fetch image to [datastore1] vmware_temp/23da5ad3-a2c4-4c1c-a8da-4da6352b7b93/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1966.768675] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/23da5ad3-a2c4-4c1c-a8da-4da6352b7b93/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1966.769369] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca21dcf6-83ef-4ac7-b7a1-665e5c8cda2b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1966.775542] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ccf123-4b65-4a0c-8e8d-74df8d6a3b95 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1966.784199] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748143bc-089a-4fe7-8c03-7a12a9554da2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1966.816036] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63d256c-8438-4b75-a9ca-b307c9a646f0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1966.822916] env[61663]: DEBUG oslo_vmware.api [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Task: {'id': task-1690805, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078867} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1966.824295] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1966.824486] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1966.824660] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1966.824835] env[61663]: INFO nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Took 0.59 seconds to destroy the instance on the hypervisor.
[ 1966.826554] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ccc7c205-aa29-492f-b995-64c5427732d8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1966.828975] env[61663]: DEBUG nova.compute.claims [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 1966.829155] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1966.829369] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1966.846404] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 1966.900062] env[61663]: DEBUG oslo_vmware.rw_handles [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/23da5ad3-a2c4-4c1c-a8da-4da6352b7b93/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 1966.962113] env[61663]: DEBUG oslo_vmware.rw_handles [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1966.962338] env[61663]: DEBUG oslo_vmware.rw_handles [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/23da5ad3-a2c4-4c1c-a8da-4da6352b7b93/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1967.142014] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531b38b3-813d-4cbf-bb89-5e8c9720769f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1967.150323] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841404f5-ca5b-43e6-a810-0f5db405d2d5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1967.183505] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4408ac8-164e-480b-b005-b6e4cf7f1b66 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1967.191994] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdc89523-5ec4-4a4d-9298-3dae28cb75f7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1967.207251] env[61663]: DEBUG nova.compute.provider_tree [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1967.217024] env[61663]: DEBUG nova.scheduler.client.report [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1967.230670] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.401s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1967.231216] env[61663]: ERROR nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1967.231216] env[61663]: Faults: ['InvalidArgument']
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Traceback (most recent call last):
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] self.driver.spawn(context, instance, image_meta,
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] self._fetch_image_if_missing(context, vi)
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] image_cache(vi, tmp_image_ds_loc)
[ 1967.231216] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] vm_util.copy_virtual_disk(
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] session._wait_for_task(vmdk_copy_task)
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] return self.wait_for_task(task_ref)
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] return evt.wait()
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] result = hub.switch()
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] return self.greenlet.switch()
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1967.231635] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] self.f(*self.args, **self.kw)
[ 1967.232030] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1967.232030] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] raise exceptions.translate_fault(task_info.error)
[ 1967.232030] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1967.232030] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Faults: ['InvalidArgument']
[ 1967.232030] env[61663]: ERROR nova.compute.manager [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655]
[ 1967.232030] env[61663]: DEBUG nova.compute.utils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1967.233559] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Build of instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 was re-scheduled: A specified parameter was not correct: fileType
[ 1967.233559] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1967.233946] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1967.234134] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1967.234339] env[61663]: DEBUG nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1967.234526] env[61663]: DEBUG nova.network.neutron [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1967.528711] env[61663]: DEBUG nova.network.neutron [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1967.539173] env[61663]: INFO nova.compute.manager [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Took 0.30 seconds to deallocate network for instance.
[ 1967.656932] env[61663]: INFO nova.scheduler.client.report [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Deleted allocations for instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655
[ 1967.681408] env[61663]: DEBUG oslo_concurrency.lockutils [None req-532ec655-8400-4e35-9f37-4ddb522b2b0d tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 682.414s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1967.682635] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 484.566s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1967.682859] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Acquiring lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1967.683086] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1967.683280] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1967.685248] env[61663]: INFO nova.compute.manager [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Terminating instance
[ 1967.687807] env[61663]: DEBUG nova.compute.manager [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1967.688020] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1967.688512] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38cfc36f-f721-42d3-a8d7-e2ea992fbc02 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1967.695193] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 1967.701543] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6cb162-74e2-4a7e-a7de-cc961112fd71 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1967.742379] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7a7a0ef0-bbea-42c0-b96e-4efc4207a655 could not be found.
[ 1967.742379] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1967.742379] env[61663]: INFO nova.compute.manager [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 1967.742379] env[61663]: DEBUG oslo.service.loopingcall [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1967.742379] env[61663]: DEBUG nova.compute.manager [-] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1967.746836] env[61663]: DEBUG nova.network.neutron [-] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1967.760589] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1967.761366] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1967.762859] env[61663]: INFO nova.compute.claims [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1968.001239] env[61663]: DEBUG nova.network.neutron [-] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1968.012491] env[61663]: INFO nova.compute.manager [-] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] Took 0.28 seconds to deallocate network for instance.
[ 1968.030853] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd411a93-ef39-4207-919b-4004d8532b51 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1968.039291] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac0b2104-8701-4773-9a99-17e8d5e7502a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1968.076758] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1cde2c-d21a-4ef6-aec6-eeda8e1b6a11 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1968.084690] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c2a93d-8a8d-42c1-bdc0-9f54cc6f93fa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1968.100326] env[61663]: DEBUG nova.compute.provider_tree [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1968.108749] env[61663]: DEBUG nova.scheduler.client.report [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1968.129689] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.369s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1968.130249] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 1968.148030] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d8c60d36-53a6-4aa7-a8e2-12fb27433495 tempest-ServerAddressesTestJSON-255205572 tempest-ServerAddressesTestJSON-255205572-project-member] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.465s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1968.148410] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 179.062s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1968.148532] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 7a7a0ef0-bbea-42c0-b96e-4efc4207a655] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1968.148705] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "7a7a0ef0-bbea-42c0-b96e-4efc4207a655" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1968.163731] env[61663]: DEBUG nova.compute.utils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1968.164941] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 1968.165120] env[61663]: DEBUG nova.network.neutron [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1968.173377] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 1968.237747] env[61663]: DEBUG nova.policy [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9519ddc1c41d47bba82a76b9e513aeec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'df1318bf07264ff3bbda8ad84e017b6d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1968.242403] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 1968.271067] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1968.271309] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1968.271493] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1968.271697] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1968.271848] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1968.271998] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1968.272773] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1968.272952] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1968.273171] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1968.273321] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1968.273497] env[61663]: DEBUG nova.virt.hardware [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1968.274335] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88bfe665-f568-4b48-b164-d524dc16688d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1968.282439] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a96e60-8a10-4f16-bd2a-58342b65f96a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1968.612088] env[61663]: DEBUG nova.network.neutron [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Successfully created port: 0f940e82-0e80-492d-91ff-57bd4385df38 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1969.385808] env[61663]: DEBUG nova.network.neutron [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Successfully updated port: 0f940e82-0e80-492d-91ff-57bd4385df38 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1969.397534] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "refresh_cache-668c457f-7ebc-441f-8ece-cc63c571363b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1969.397700] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquired lock "refresh_cache-668c457f-7ebc-441f-8ece-cc63c571363b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1969.397860] env[61663]: DEBUG nova.network.neutron [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1969.481108] env[61663]: DEBUG nova.network.neutron [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1969.618506] env[61663]: DEBUG nova.compute.manager [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Received event network-vif-plugged-0f940e82-0e80-492d-91ff-57bd4385df38 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1969.618774] env[61663]: DEBUG oslo_concurrency.lockutils [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] Acquiring lock "668c457f-7ebc-441f-8ece-cc63c571363b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1969.618897] env[61663]: DEBUG oslo_concurrency.lockutils [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] Lock "668c457f-7ebc-441f-8ece-cc63c571363b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1969.619078] env[61663]: DEBUG oslo_concurrency.lockutils [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] Lock "668c457f-7ebc-441f-8ece-cc63c571363b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1969.619281] env[61663]: DEBUG nova.compute.manager [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] No waiting events found dispatching network-vif-plugged-0f940e82-0e80-492d-91ff-57bd4385df38 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1969.619409] env[61663]: WARNING nova.compute.manager [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Received unexpected event network-vif-plugged-0f940e82-0e80-492d-91ff-57bd4385df38 for instance with vm_state building and task_state spawning.
[ 1969.619569] env[61663]: DEBUG nova.compute.manager [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Received event network-changed-0f940e82-0e80-492d-91ff-57bd4385df38 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1969.619725] env[61663]: DEBUG nova.compute.manager [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Refreshing instance network info cache due to event network-changed-0f940e82-0e80-492d-91ff-57bd4385df38. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 1969.619892] env[61663]: DEBUG oslo_concurrency.lockutils [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] Acquiring lock "refresh_cache-668c457f-7ebc-441f-8ece-cc63c571363b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1969.725800] env[61663]: DEBUG nova.network.neutron [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Updating instance_info_cache with network_info: [{"id": "0f940e82-0e80-492d-91ff-57bd4385df38", "address": "fa:16:3e:da:4b:73", "network": {"id": "1f6704dd-433c-4e3f-88aa-109f2e318391", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-310120332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df1318bf07264ff3bbda8ad84e017b6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f940e82-0e", "ovs_interfaceid": "0f940e82-0e80-492d-91ff-57bd4385df38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1969.737166] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Releasing lock "refresh_cache-668c457f-7ebc-441f-8ece-cc63c571363b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1969.737452] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Instance network_info: |[{"id": "0f940e82-0e80-492d-91ff-57bd4385df38", "address": "fa:16:3e:da:4b:73", "network": {"id": "1f6704dd-433c-4e3f-88aa-109f2e318391", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-310120332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df1318bf07264ff3bbda8ad84e017b6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f940e82-0e", "ovs_interfaceid": "0f940e82-0e80-492d-91ff-57bd4385df38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1969.737747] env[61663]: DEBUG oslo_concurrency.lockutils [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] Acquired lock "refresh_cache-668c457f-7ebc-441f-8ece-cc63c571363b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1969.737929] env[61663]: DEBUG nova.network.neutron [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Refreshing network info cache for port 0f940e82-0e80-492d-91ff-57bd4385df38 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1969.738988] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:4b:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '24144f5a-050a-4f1e-8d8c-774dc16dc791', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f940e82-0e80-492d-91ff-57bd4385df38', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1969.746382] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Creating folder: Project (df1318bf07264ff3bbda8ad84e017b6d). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1969.747251] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4cd4c4d2-a63b-4d1b-90b3-69117e3a0893 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1969.765700] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Created folder: Project (df1318bf07264ff3bbda8ad84e017b6d) in parent group-v352575.
[ 1969.765900] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Creating folder: Instances. Parent ref: group-v352652. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1969.766156] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ccd6bc7-3f44-48f6-8924-018fa29b487a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1969.778635] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Created folder: Instances in parent group-v352652.
[ 1969.778859] env[61663]: DEBUG oslo.service.loopingcall [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1969.779055] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1969.779264] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b2559f1-6279-467d-8d2c-05bf38a641f0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1969.799427] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1969.799427] env[61663]: value = "task-1690808"
[ 1969.799427] env[61663]: _type = "Task"
[ 1969.799427] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1969.806234] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690808, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1970.062024] env[61663]: DEBUG nova.network.neutron [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Updated VIF entry in instance network info cache for port 0f940e82-0e80-492d-91ff-57bd4385df38. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1970.062540] env[61663]: DEBUG nova.network.neutron [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Updating instance_info_cache with network_info: [{"id": "0f940e82-0e80-492d-91ff-57bd4385df38", "address": "fa:16:3e:da:4b:73", "network": {"id": "1f6704dd-433c-4e3f-88aa-109f2e318391", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-310120332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "df1318bf07264ff3bbda8ad84e017b6d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "24144f5a-050a-4f1e-8d8c-774dc16dc791", "external-id": "cl2-zone-252", "segmentation_id": 252, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f940e82-0e", "ovs_interfaceid": "0f940e82-0e80-492d-91ff-57bd4385df38", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1970.072124] env[61663]: DEBUG oslo_concurrency.lockutils [req-584e7bdd-7a61-4a30-927c-eedcb54ed7b2 req-b90280c1-ea4c-4461-bac1-425a15054146 service nova] Releasing lock "refresh_cache-668c457f-7ebc-441f-8ece-cc63c571363b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1970.309461] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690808, 'name': CreateVM_Task, 'duration_secs': 0.300205} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1970.309673] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1970.310490] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1970.310590] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1970.310894] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1970.311161] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3428eb7a-5f17-4bb7-b462-6ca4a0b7db8b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1970.316129] env[61663]: DEBUG oslo_vmware.api [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Waiting for the task: (returnval){
[ 1970.316129] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dc4c9e-4e38-edae-e5e3-b85afa82d454"
[ 1970.316129] env[61663]: _type = "Task"
[ 1970.316129] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1970.327273] env[61663]: DEBUG oslo_vmware.api [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dc4c9e-4e38-edae-e5e3-b85afa82d454, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1970.827521] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1970.827781] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1970.827988] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1974.953897] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "668c457f-7ebc-441f-8ece-cc63c571363b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1975.138036] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1975.138367] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1975.293443] env[61663]: DEBUG oslo_concurrency.lockutils [None req-997aace2-bef5-43a1-9f12-ce256dc1e079 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "12ecd7c0-dcb3-42f6-8560-c239f786254c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1975.293443] env[61663]: DEBUG oslo_concurrency.lockutils [None req-997aace2-bef5-43a1-9f12-ce256dc1e079 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "12ecd7c0-dcb3-42f6-8560-c239f786254c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2014.553988] env[61663]: WARNING oslo_vmware.rw_handles [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles response.begin()
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2014.553988] env[61663]: ERROR oslo_vmware.rw_handles
[ 2014.554576] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/23da5ad3-a2c4-4c1c-a8da-4da6352b7b93/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2014.556344] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2014.556587] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Copying Virtual Disk [datastore1] vmware_temp/23da5ad3-a2c4-4c1c-a8da-4da6352b7b93/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/23da5ad3-a2c4-4c1c-a8da-4da6352b7b93/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2014.556878] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b47ce6c2-d651-4ad9-a40d-e21431aa5ba4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2014.564905] env[61663]: DEBUG oslo_vmware.api [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for the task: (returnval){
[ 2014.564905] env[61663]: value = "task-1690809"
[ 2014.564905] env[61663]: _type = "Task"
[ 2014.564905] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2014.572710] env[61663]: DEBUG oslo_vmware.api [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': task-1690809, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2015.075837] env[61663]: DEBUG oslo_vmware.exceptions [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2015.076146] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2015.076798] env[61663]: ERROR nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2015.076798] env[61663]: Faults: ['InvalidArgument']
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Traceback (most recent call last):
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] yield resources
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] self.driver.spawn(context, instance, image_meta,
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] self._fetch_image_if_missing(context, vi)
[ 2015.076798] env[61663]: ERROR nova.compute.manager [instance:
1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] image_cache(vi, tmp_image_ds_loc) [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] vm_util.copy_virtual_disk( [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] session._wait_for_task(vmdk_copy_task) [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] return self.wait_for_task(task_ref) [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] return evt.wait() [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] result = hub.switch() [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2015.077102] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] return self.greenlet.switch() [ 2015.077561] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2015.077561] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] self.f(*self.args, **self.kw) [ 2015.077561] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2015.077561] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] raise exceptions.translate_fault(task_info.error) [ 2015.077561] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2015.077561] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Faults: ['InvalidArgument'] [ 2015.077561] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] [ 2015.077561] env[61663]: INFO nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 
tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Terminating instance [ 2015.078739] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2015.078961] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2015.079213] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbf12f54-0be1-422b-87a6-573ac2bfa412 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.081404] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2015.081612] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2015.082330] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b808984c-f74e-46ef-8bf2-a6be93c73ec2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.088904] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2015.089154] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b85ba6e8-0748-487e-8d1e-e7c19d72336f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.091240] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2015.091413] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2015.092698] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c57963ef-8107-4960-850b-6b6d0fb57a81 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.096852] env[61663]: DEBUG oslo_vmware.api [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Waiting for the task: (returnval){ [ 2015.096852] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52fd42e7-0a2d-fd89-1515-1d5dca9e9aef" [ 2015.096852] env[61663]: _type = "Task" [ 2015.096852] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.103822] env[61663]: DEBUG oslo_vmware.api [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52fd42e7-0a2d-fd89-1515-1d5dca9e9aef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.160934] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2015.161195] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2015.161377] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Deleting the datastore file [datastore1] 1305216b-0ee5-499a-a82a-30b45a8c832c {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2015.161690] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-514fcd1c-b1e4-40d1-b049-2448b2f5f89b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.167945] env[61663]: DEBUG oslo_vmware.api [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for the task: (returnval){ [ 2015.167945] env[61663]: value = "task-1690811" [ 2015.167945] env[61663]: _type = "Task" [ 2015.167945] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.176719] env[61663]: DEBUG oslo_vmware.api [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': task-1690811, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.608068] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2015.608068] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Creating directory with path [datastore1] vmware_temp/778159ff-5fb3-41c5-b2a2-a137716dc4f0/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2015.608068] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65ed8ace-3ddd-4300-a3e4-1a5dfc8c4049 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.618974] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Created directory with path [datastore1] vmware_temp/778159ff-5fb3-41c5-b2a2-a137716dc4f0/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2015.619180] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Fetch image to [datastore1] vmware_temp/778159ff-5fb3-41c5-b2a2-a137716dc4f0/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2015.619351] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/778159ff-5fb3-41c5-b2a2-a137716dc4f0/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2015.620067] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe24602-f449-4527-8970-720a896e1b7c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.626520] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fee214-800a-4e70-bd65-c96fd7a0080a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.635486] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704839e9-34f5-413c-beef-c8e566c8e477 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.665549] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68766d5f-4368-4341-842e-c8d56ccafd81 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.672691] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b5e99ef5-e04b-4edd-a86f-9116d510c3a5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.676971] env[61663]: DEBUG oslo_vmware.api [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': task-1690811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078505} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.677529] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2015.677690] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2015.677886] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2015.678079] env[61663]: INFO nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2015.680128] env[61663]: DEBUG nova.compute.claims [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2015.680303] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2015.680513] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.691805] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2015.692448] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2015.699144] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2015.839431] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/778159ff-5fb3-41c5-b2a2-a137716dc4f0/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2015.902766] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Completed reading data from the image iterator. 
{{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2015.903349] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/778159ff-5fb3-41c5-b2a2-a137716dc4f0/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2015.983570] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a32144b-f150-4aee-ae29-a54da6f61d99 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.992510] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca63f4c8-cc5f-42e3-bf8c-0cd248363509 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.023151] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0525fa0-6fbe-4bf6-8096-8f56e82cc356 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.030302] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095448aa-18a4-4c19-ad17-319b9da96c43 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.043296] env[61663]: DEBUG nova.compute.provider_tree [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2016.052094] env[61663]: DEBUG nova.scheduler.client.report [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2016.066068] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.385s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.066599] env[61663]: ERROR nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 
1305216b-0ee5-499a-a82a-30b45a8c832c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2016.066599] env[61663]: Faults: ['InvalidArgument'] [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Traceback (most recent call last): [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] self.driver.spawn(context, instance, image_meta, [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] self._fetch_image_if_missing(context, vi) [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] image_cache(vi, tmp_image_ds_loc) [ 2016.066599] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] vm_util.copy_virtual_disk( [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] session._wait_for_task(vmdk_copy_task) [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] return self.wait_for_task(task_ref) [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] return evt.wait() [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] result = hub.switch() [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2016.066917] env[61663]: ERROR 
nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] return self.greenlet.switch() [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2016.066917] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] self.f(*self.args, **self.kw) [ 2016.067188] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2016.067188] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] raise exceptions.translate_fault(task_info.error) [ 2016.067188] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2016.067188] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Faults: ['InvalidArgument'] [ 2016.067188] env[61663]: ERROR nova.compute.manager [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] [ 2016.067449] env[61663]: DEBUG nova.compute.utils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2016.068742] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Build of instance 1305216b-0ee5-499a-a82a-30b45a8c832c was re-scheduled: A specified parameter was not correct: fileType [ 2016.068742] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2016.069126] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2016.069306] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2016.069560] env[61663]: DEBUG nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2016.069637] env[61663]: DEBUG nova.network.neutron [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2016.652088] env[61663]: DEBUG nova.network.neutron [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.663563] env[61663]: INFO nova.compute.manager [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Took 0.59 seconds to deallocate network for instance. [ 2016.789284] env[61663]: INFO nova.scheduler.client.report [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Deleted allocations for instance 1305216b-0ee5-499a-a82a-30b45a8c832c [ 2016.810026] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b3a91ef2-9123-4a8e-9021-55066b4220f5 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 676.336s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.811096] env[61663]: DEBUG oslo_concurrency.lockutils [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 480.199s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.811332] env[61663]: DEBUG oslo_concurrency.lockutils [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "1305216b-0ee5-499a-a82a-30b45a8c832c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.811542] env[61663]: DEBUG oslo_concurrency.lockutils [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.811708] env[61663]: DEBUG oslo_concurrency.lockutils [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.813904] env[61663]: INFO nova.compute.manager [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Terminating instance [ 2016.815575] env[61663]: DEBUG nova.compute.manager [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2016.815778] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2016.816408] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3d4a80d9-f0d1-4669-b9ef-468dd5b1d9d4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.825890] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e00cbb-f297-4877-aa59-b57c368a8725 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.836886] env[61663]: DEBUG nova.compute.manager [None req-aaaf2e48-9df6-427b-99b5-5e2a1fdadadd tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: d62bc172-a64b-481e-a2fa-55ad4ccf73f5] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2016.858410] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1305216b-0ee5-499a-a82a-30b45a8c832c could not be found. 
[ 2016.858621] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2016.858799] env[61663]: INFO nova.compute.manager [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2016.859053] env[61663]: DEBUG oslo.service.loopingcall [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2016.859273] env[61663]: DEBUG nova.compute.manager [-] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2016.859371] env[61663]: DEBUG nova.network.neutron [-] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2016.861615] env[61663]: DEBUG nova.compute.manager [None req-aaaf2e48-9df6-427b-99b5-5e2a1fdadadd tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: d62bc172-a64b-481e-a2fa-55ad4ccf73f5] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 2016.881193] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aaaf2e48-9df6-427b-99b5-5e2a1fdadadd tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "d62bc172-a64b-481e-a2fa-55ad4ccf73f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.586s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.884030] env[61663]: DEBUG nova.network.neutron [-] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.889564] env[61663]: DEBUG nova.compute.manager [None req-fad726d7-dba3-4763-938c-f373b232209b tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 3444cec9-7da9-47d9-b669-cd1b4261e9d1] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2016.892172] env[61663]: INFO nova.compute.manager [-] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] Took 0.03 seconds to deallocate network for instance. [ 2016.910551] env[61663]: DEBUG nova.compute.manager [None req-fad726d7-dba3-4763-938c-f373b232209b tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 3444cec9-7da9-47d9-b669-cd1b4261e9d1] Instance disappeared before build. 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 2016.930423] env[61663]: DEBUG oslo_concurrency.lockutils [None req-fad726d7-dba3-4763-938c-f373b232209b tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "3444cec9-7da9-47d9-b669-cd1b4261e9d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 238.208s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.938785] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2016.987319] env[61663]: DEBUG oslo_concurrency.lockutils [None req-69e4b45a-48a1-465f-8bce-926dff054221 tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.988212] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 227.902s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.988402] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 1305216b-0ee5-499a-a82a-30b45a8c832c] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2016.988603] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "1305216b-0ee5-499a-a82a-30b45a8c832c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.992578] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.992809] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.994323] env[61663]: INFO nova.compute.claims [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2017.235401] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f050ad9-876e-4e9b-92e5-8e9aa86ad50f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.242722] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3b7279-d85e-45ad-8ff0-24c2870dbcce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.271364] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fbd65f-a9a0-48d9-ae7f-cb717d3ba155 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.278324] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e1ca80-89e8-43c3-ba04-d802edb41442 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.292719] env[61663]: DEBUG nova.compute.provider_tree [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2017.300841] env[61663]: DEBUG nova.scheduler.client.report [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2017.314152] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.321s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.314687] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2017.350190] env[61663]: DEBUG nova.compute.utils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2017.351548] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2017.351728] env[61663]: DEBUG nova.network.neutron [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2017.363592] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2017.418804] env[61663]: DEBUG nova.policy [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e773848d8cb4987bb4d6e5c5f5b6585', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a638dcbed35d49dfb7fcd458772f2b86', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2017.424737] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2017.449652] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2017.449956] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2017.450072] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2017.450263] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2017.450411] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2017.450560] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2017.450767] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2017.450950] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2017.451494] env[61663]: DEBUG nova.virt.hardware [None 
req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2017.451494] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2017.451494] env[61663]: DEBUG nova.virt.hardware [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2017.452355] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae61fd4b-7a96-459e-b30c-31baf0c22794 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.460455] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ec5e27-5a3b-44bb-91aa-4ab46c6c9350 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.687159] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.814766] env[61663]: DEBUG nova.network.neutron [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Successfully created port: 4e8b4c71-a946-43a1-baf9-314b43de25b7 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2018.280860] env[61663]: DEBUG nova.network.neutron [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Successfully created port: c54ff757-6dc2-4cc4-800e-53b85e12d745 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2019.001959] env[61663]: DEBUG nova.compute.manager [req-c4972dff-acd6-415d-87d3-bb4378bfcf94 req-16351b19-d1d9-40d0-a4fb-49eff924de1f service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Received event network-vif-plugged-4e8b4c71-a946-43a1-baf9-314b43de25b7 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2019.002209] env[61663]: DEBUG oslo_concurrency.lockutils [req-c4972dff-acd6-415d-87d3-bb4378bfcf94 req-16351b19-d1d9-40d0-a4fb-49eff924de1f service nova] Acquiring lock "ae347f45-f39e-47eb-9e37-80ddfc502c27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.002376] env[61663]: DEBUG oslo_concurrency.lockutils [req-c4972dff-acd6-415d-87d3-bb4378bfcf94 req-16351b19-d1d9-40d0-a4fb-49eff924de1f service nova] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.002521] env[61663]: DEBUG oslo_concurrency.lockutils [req-c4972dff-acd6-415d-87d3-bb4378bfcf94 req-16351b19-d1d9-40d0-a4fb-49eff924de1f service nova] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.002683] env[61663]: DEBUG nova.compute.manager [req-c4972dff-acd6-415d-87d3-bb4378bfcf94 req-16351b19-d1d9-40d0-a4fb-49eff924de1f service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] No waiting events found dispatching network-vif-plugged-4e8b4c71-a946-43a1-baf9-314b43de25b7 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2019.002844] env[61663]: WARNING nova.compute.manager [req-c4972dff-acd6-415d-87d3-bb4378bfcf94 req-16351b19-d1d9-40d0-a4fb-49eff924de1f service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Received unexpected event network-vif-plugged-4e8b4c71-a946-43a1-baf9-314b43de25b7 for instance with vm_state building and task_state spawning. [ 2019.111276] env[61663]: DEBUG nova.network.neutron [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Successfully updated port: 4e8b4c71-a946-43a1-baf9-314b43de25b7 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2019.693394] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2019.693394] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2019.693394] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2019.718027] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.718027] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.718027] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.718386] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.718621] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.718838] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.719927] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.719927] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.719927] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.719927] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2019.719927] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
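The "Received unexpected event" warning earlier in this section is a benign race: Neutron's network-vif-plugged notification arrived before the compute thread registered a waiter for it, so pop_instance_event found nothing to wake. A simplified sketch of the underlying latch pattern (names are illustrative):

import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}            # (instance_uuid, event_name) -> Event
        self._lock = threading.Lock()

    def prepare(self, uuid, name):
        # Called by the spawn path *before* plugging the VIF.
        with self._lock:
            ev = self._waiters[(uuid, name)] = threading.Event()
        return ev

    def pop(self, uuid, name):
        # Called when the external event arrives from Neutron.
        with self._lock:
            ev = self._waiters.pop((uuid, name), None)
        if ev is None:
            print('Received unexpected event %s for instance %s' % (name, uuid))
        else:
            ev.set()                  # wake the thread waiting on the plug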
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2019.842772] env[61663]: DEBUG nova.network.neutron [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Successfully updated port: c54ff757-6dc2-4cc4-800e-53b85e12d745 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2019.851568] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquiring lock "refresh_cache-ae347f45-f39e-47eb-9e37-80ddfc502c27" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.851854] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquired lock "refresh_cache-ae347f45-f39e-47eb-9e37-80ddfc502c27" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.852107] env[61663]: DEBUG nova.network.neutron [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2020.154695] env[61663]: DEBUG nova.network.neutron [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Instance cache missing network info. 
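The _heal_instance_info_cache pass above skipped every instance because all of them are still Building: while the spawn path owns an instance, the periodic task leaves its network cache alone. A sketch of how such a task is declared with oslo.service; the spacing and the skip rule here are illustrative:

from oslo_config import cfg
from oslo_service import periodic_task

class Manager(periodic_task.PeriodicTasks):
    def __init__(self, instances):
        super().__init__(cfg.CONF)
        self.instances = instances    # [(uuid, vm_state)] for the sketch

    @periodic_task.periodic_task(spacing=60)
    def _heal_instance_info_cache(self, context):
        for uuid, vm_state in self.instances:
            if vm_state == 'building':
                # spawn is writing this cache right now; do not race it
                continue
            # ... refresh the instance's network info cache here ...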
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2020.691987] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.740194] env[61663]: DEBUG nova.network.neutron [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Updating instance_info_cache with network_info: [{"id": "4e8b4c71-a946-43a1-baf9-314b43de25b7", "address": "fa:16:3e:e6:19:e3", "network": {"id": "8232ac9a-1fac-487f-920e-5461d0651d3d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-602301913", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a638dcbed35d49dfb7fcd458772f2b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8b4c71-a9", "ovs_interfaceid": "4e8b4c71-a946-43a1-baf9-314b43de25b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c54ff757-6dc2-4cc4-800e-53b85e12d745", "address": "fa:16:3e:87:86:8a", "network": {"id": "198d39e8-0d65-4e54-a7bd-6cae6f090b30", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1280124458", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a638dcbed35d49dfb7fcd458772f2b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc54ff757-6d", "ovs_interfaceid": "c54ff757-6dc2-4cc4-800e-53b85e12d745", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.752208] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Releasing lock "refresh_cache-ae347f45-f39e-47eb-9e37-80ddfc502c27" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.752532] env[61663]: DEBUG nova.compute.manager [None 
req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Instance network_info: |[{"id": "4e8b4c71-a946-43a1-baf9-314b43de25b7", "address": "fa:16:3e:e6:19:e3", "network": {"id": "8232ac9a-1fac-487f-920e-5461d0651d3d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-602301913", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a638dcbed35d49dfb7fcd458772f2b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8b4c71-a9", "ovs_interfaceid": "4e8b4c71-a946-43a1-baf9-314b43de25b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c54ff757-6dc2-4cc4-800e-53b85e12d745", "address": "fa:16:3e:87:86:8a", "network": {"id": "198d39e8-0d65-4e54-a7bd-6cae6f090b30", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1280124458", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a638dcbed35d49dfb7fcd458772f2b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc54ff757-6d", "ovs_interfaceid": "c54ff757-6dc2-4cc4-800e-53b85e12d745", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2020.752959] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:19:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0549820d-5649-40bc-ad6e-9ae27b384d90', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e8b4c71-a946-43a1-baf9-314b43de25b7', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:86:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c54ff757-6dc2-4cc4-800e-53b85e12d745', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2020.762415] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Creating folder: Project (a638dcbed35d49dfb7fcd458772f2b86). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2020.762908] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c982413d-3084-4258-8743-5b1d026a970b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.775450] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Created folder: Project (a638dcbed35d49dfb7fcd458772f2b86) in parent group-v352575. [ 2020.775633] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Creating folder: Instances. Parent ref: group-v352655. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2020.775853] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-884338cd-f1a0-49a7-9145-c1e767263d18 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.785312] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Created folder: Instances in parent group-v352655. [ 2020.785534] env[61663]: DEBUG oslo.service.loopingcall [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2020.785712] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2020.785899] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46263246-24bd-4d0c-8551-2844d5c00d70 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.807208] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2020.807208] env[61663]: value = "task-1690814" [ 2020.807208] env[61663]: _type = "Task" [ 2020.807208] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.814248] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690814, 'name': CreateVM_Task} progress is 0%. 
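Folder.CreateFolder returns synchronously, but CreateVM_Task returns a Task reference that the log then polls ("progress is 0%" until completion). A hedged sketch of that invoke-then-wait pattern; the session object and the folder, config-spec and resource-pool references are assumed to already exist:

def create_vm(session, vm_folder_ref, config_spec, res_pool_ref):
    # invoke_api issues the SOAP call and hands back the Task moref.
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                              config=config_spec, pool=res_pool_ref)
    # wait_for_task polls Task.info until it reports success or error,
    # producing exactly the progress lines seen above.
    return session.wait_for_task(task)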
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.030314] env[61663]: DEBUG nova.compute.manager [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Received event network-changed-4e8b4c71-a946-43a1-baf9-314b43de25b7 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2021.030701] env[61663]: DEBUG nova.compute.manager [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Refreshing instance network info cache due to event network-changed-4e8b4c71-a946-43a1-baf9-314b43de25b7. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2021.030766] env[61663]: DEBUG oslo_concurrency.lockutils [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] Acquiring lock "refresh_cache-ae347f45-f39e-47eb-9e37-80ddfc502c27" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.030928] env[61663]: DEBUG oslo_concurrency.lockutils [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] Acquired lock "refresh_cache-ae347f45-f39e-47eb-9e37-80ddfc502c27" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.031113] env[61663]: DEBUG nova.network.neutron [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Refreshing network info cache for port 4e8b4c71-a946-43a1-baf9-314b43de25b7 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2021.316804] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690814, 'name': CreateVM_Task, 'duration_secs': 0.336646} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.317174] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2021.317722] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.317928] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.318230] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2021.318467] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f4ba9f4-af0e-42ef-8fa3-34e325ab3691 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.322755] env[61663]: DEBUG oslo_vmware.api [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Waiting for the task: (returnval){ [ 2021.322755] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52aa9861-1225-e749-a9cb-15382822e555" [ 2021.322755] env[61663]: _type = "Task" [ 2021.322755] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.330781] env[61663]: DEBUG oslo_vmware.api [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52aa9861-1225-e749-a9cb-15382822e555, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.355167] env[61663]: DEBUG nova.network.neutron [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Updated VIF entry in instance network info cache for port 4e8b4c71-a946-43a1-baf9-314b43de25b7. 
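Nearly every step above runs inside an oslo.concurrency lock, including the "[datastore1] devstack-image-cache_base/..." lock just acquired so that only one request fetches or reuses a cached image at a time. A minimal sketch of both forms seen throughout this log (lock names copied from the log; the bodies are placeholders):

from oslo_concurrency import lockutils

cache_key = ('[datastore1] devstack-image-cache_base/'
             '362c8152-fcd0-4f43-acbf-09a2dc376cb2')
with lockutils.lock(cache_key):
    pass  # fetch the VMDK or reuse the cache while holding the lock

@lockutils.synchronized('compute_resources')
def instance_claim():
    pass  # resource-tracker style critical section, as at the top of this section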
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2021.355597] env[61663]: DEBUG nova.network.neutron [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Updating instance_info_cache with network_info: [{"id": "4e8b4c71-a946-43a1-baf9-314b43de25b7", "address": "fa:16:3e:e6:19:e3", "network": {"id": "8232ac9a-1fac-487f-920e-5461d0651d3d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-602301913", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a638dcbed35d49dfb7fcd458772f2b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8b4c71-a9", "ovs_interfaceid": "4e8b4c71-a946-43a1-baf9-314b43de25b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c54ff757-6dc2-4cc4-800e-53b85e12d745", "address": "fa:16:3e:87:86:8a", "network": {"id": "198d39e8-0d65-4e54-a7bd-6cae6f090b30", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1280124458", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a638dcbed35d49dfb7fcd458772f2b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc54ff757-6d", "ovs_interfaceid": "c54ff757-6dc2-4cc4-800e-53b85e12d745", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.372227] env[61663]: DEBUG oslo_concurrency.lockutils [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] Releasing lock "refresh_cache-ae347f45-f39e-47eb-9e37-80ddfc502c27" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.372462] env[61663]: DEBUG nova.compute.manager [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Received event network-vif-plugged-c54ff757-6dc2-4cc4-800e-53b85e12d745 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2021.372650] env[61663]: DEBUG oslo_concurrency.lockutils 
[req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] Acquiring lock "ae347f45-f39e-47eb-9e37-80ddfc502c27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.372849] env[61663]: DEBUG oslo_concurrency.lockutils [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.373017] env[61663]: DEBUG oslo_concurrency.lockutils [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.373188] env[61663]: DEBUG nova.compute.manager [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] No waiting events found dispatching network-vif-plugged-c54ff757-6dc2-4cc4-800e-53b85e12d745 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2021.373351] env[61663]: WARNING nova.compute.manager [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Received unexpected event network-vif-plugged-c54ff757-6dc2-4cc4-800e-53b85e12d745 for instance with vm_state building and task_state spawning. [ 2021.373511] env[61663]: DEBUG nova.compute.manager [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Received event network-changed-c54ff757-6dc2-4cc4-800e-53b85e12d745 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2021.373663] env[61663]: DEBUG nova.compute.manager [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Refreshing instance network info cache due to event network-changed-c54ff757-6dc2-4cc4-800e-53b85e12d745. 
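The network_info blobs above are JSON-serializable lists with one element per VIF; everything Nova knows about the instance's ports (MAC, fixed IPs, MTU, OVS devname, NSX segment) lives in that structure. A quick way to summarize such a blob, assuming it has been dumped to a file first:

import json

with open('network_info.json') as f:      # hypothetical dump of the blob above
    network_info = json.load(f)

for vif in network_info:
    ips = [ip['address']
           for subnet in vif['network']['subnets']
           for ip in subnet['ips']]
    print(vif['devname'], vif['address'], ips)
# -> tap4e8b4c71-a9 fa:16:3e:e6:19:e3 ['192.168.128.48']
#    tapc54ff757-6d fa:16:3e:87:86:8a ['192.168.129.246']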
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2021.373842] env[61663]: DEBUG oslo_concurrency.lockutils [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] Acquiring lock "refresh_cache-ae347f45-f39e-47eb-9e37-80ddfc502c27" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.374039] env[61663]: DEBUG oslo_concurrency.lockutils [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] Acquired lock "refresh_cache-ae347f45-f39e-47eb-9e37-80ddfc502c27" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.374220] env[61663]: DEBUG nova.network.neutron [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Refreshing network info cache for port c54ff757-6dc2-4cc4-800e-53b85e12d745 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2021.691522] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.691742] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.691906] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2021.707243] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.707461] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.707625] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.707780] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2021.708882] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0dbca4b-d306-4f0c-a9b0-333e8088fc73 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.713654] env[61663]: DEBUG nova.network.neutron [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Updated VIF entry in instance network info cache for port c54ff757-6dc2-4cc4-800e-53b85e12d745. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2021.714223] env[61663]: DEBUG nova.network.neutron [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Updating instance_info_cache with network_info: [{"id": "4e8b4c71-a946-43a1-baf9-314b43de25b7", "address": "fa:16:3e:e6:19:e3", "network": {"id": "8232ac9a-1fac-487f-920e-5461d0651d3d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-602301913", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.48", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a638dcbed35d49dfb7fcd458772f2b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8b4c71-a9", "ovs_interfaceid": "4e8b4c71-a946-43a1-baf9-314b43de25b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c54ff757-6dc2-4cc4-800e-53b85e12d745", "address": "fa:16:3e:87:86:8a", "network": {"id": "198d39e8-0d65-4e54-a7bd-6cae6f090b30", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1280124458", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.246", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a638dcbed35d49dfb7fcd458772f2b86", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc54ff757-6d", "ovs_interfaceid": "c54ff757-6dc2-4cc4-800e-53b85e12d745", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.718561] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ea647b-f609-4c9d-a770-f35209069963 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.724150] env[61663]: DEBUG oslo_concurrency.lockutils [req-600bdaf1-21b1-4ad3-bf6a-20c735b248bb 
req-679e0336-7c1f-4454-9b58-f1e1e30f593b service nova] Releasing lock "refresh_cache-ae347f45-f39e-47eb-9e37-80ddfc502c27" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.732585] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2125850-e507-469e-9403-30e8f38d43b6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.739176] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bb8f81-9b24-4bd7-98c2-e7d738f33c88 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.768949] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181291MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2021.769135] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.769299] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.836368] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.836681] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2021.836820] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.854637] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.854783] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.854906] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.855042] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.855166] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.855286] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.855403] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.855517] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 440175fc-da0c-4ea3-9a74-46e97e32658b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.855631] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.866949] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 59d4580f-5897-42d6-82cb-0aead4d2658c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2021.867107] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2021.877858] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2021.887933] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2021.901546] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 44a9bf55-1c16-49aa-a61f-611696fb2c54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2021.912181] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2021.921354] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
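The resource tracker above walks the placement allocations for this node and distinguishes three cases. A loose sketch of that decision, illustrative only (the real logic lives in _remove_deleted_instances_allocations):

# Allocation payload exactly as logged, keyed by resource class.
ALLOC = {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}

def heal_action(managed_here, scheduled_not_started):
    if managed_here:
        return 'keep'    # "actively managed on this compute host"
    if scheduled_not_started:
        return 'skip'    # scheduler claimed it; spawn has not begun
    return 'delete'      # stale allocation for a deleted instance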
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2021.930149] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 12ecd7c0-dcb3-42f6-8560-c239f786254c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2021.930371] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2021.930515] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2022.122770] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef1e5ce-b547-45d2-a240-6cfa07968214 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.129708] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008828a6-74db-4f9b-a8f0-7c2620a4ec0e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.159058] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0299a815-4b84-4772-8b51-7b8f376838db {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.165611] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef988400-7bf1-456f-8e7e-6396623c51c1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.178339] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2022.187373] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2022.201833] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) 
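The inventory reported to placement fixes the schedulable capacity: effective = total * allocation_ratio - reserved. Checking the numbers above, plus the used_ram figure from the final resource view:

inv = {'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
       'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
       'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0}}
for rc, v in inv.items():
    print(rc, v['total'] * v['allocation_ratio'] - v['reserved'])
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

print(512 + 10 * 128)  # 1792 -> used_ram=1792MB: reserved + ten 128MB m1.nano guests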
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2022.202076] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.433s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2027.202067] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2027.202409] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2030.688201] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2043.764865] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "e47c9821-f815-4bd5-bf00-8822f08e3333" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.764865] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.144569] env[61663]: DEBUG oslo_concurrency.lockutils [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquiring lock "ae347f45-f39e-47eb-9e37-80ddfc502c27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.358175] env[61663]: WARNING oslo_vmware.rw_handles [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles 
response.begin() [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2064.358175] env[61663]: ERROR oslo_vmware.rw_handles [ 2064.358919] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/778159ff-5fb3-41c5-b2a2-a137716dc4f0/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2064.360885] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2064.361189] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Copying Virtual Disk [datastore1] vmware_temp/778159ff-5fb3-41c5-b2a2-a137716dc4f0/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/778159ff-5fb3-41c5-b2a2-a137716dc4f0/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2064.361495] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d549633c-acb1-4173-8d2c-9f5abfe65140 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.370497] env[61663]: DEBUG oslo_vmware.api [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Waiting for the task: (returnval){ [ 2064.370497] env[61663]: value = "task-1690815" [ 2064.370497] env[61663]: _type = "Task" [ 2064.370497] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.378976] env[61663]: DEBUG oslo_vmware.api [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Task: {'id': task-1690815, 'name': CopyVirtualDisk_Task} progress is 0%. 
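The WARNING traceback above fires when rw_handles closes an image-transfer connection whose server side already hung up; the payload had landed on the datastore, so the error is logged and the transfer is treated as complete. A sketch of tolerating that specific failure on close (an assumed simplification of the real handler):

import http.client

def safe_close(conn):
    try:
        conn.getresponse()        # raises if the peer closed without replying
    except http.client.RemoteDisconnected as exc:
        print('Error occurred while reading the HTTP response: %s' % exc)
    finally:
        conn.close()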
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.882555] env[61663]: DEBUG oslo_vmware.exceptions [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2064.882901] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2064.883491] env[61663]: ERROR nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2064.883491] env[61663]: Faults: ['InvalidArgument'] [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Traceback (most recent call last): [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] yield resources [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] self.driver.spawn(context, instance, image_meta, [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] self._fetch_image_if_missing(context, vi) [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] image_cache(vi, tmp_image_ds_loc) [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] vm_util.copy_virtual_disk( [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 
47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] session._wait_for_task(vmdk_copy_task) [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] return self.wait_for_task(task_ref) [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] return evt.wait() [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] result = hub.switch() [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] return self.greenlet.switch() [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] self.f(*self.args, **self.kw) [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] raise exceptions.translate_fault(task_info.error) [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Faults: ['InvalidArgument'] [ 2064.883491] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] [ 2064.884406] env[61663]: INFO nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Terminating instance [ 2064.885406] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.885621] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 
tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2064.886236] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2064.886432] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2064.886659] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a090820-6c70-474a-9db5-db9e49ffae41 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.888952] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd85983-b108-4513-99c4-c330361b5fca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.895894] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2064.896119] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a7a4f1f-78db-4f4e-a2ff-41d531a62754 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.910384] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2064.910556] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2064.911235] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cb314be-9713-48e1-ba0c-84d4972d17ea {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.916121] env[61663]: DEBUG oslo_vmware.api [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Waiting for the task: (returnval){ [ 2064.916121] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f569c1-b3d1-f605-4600-cf0f2be1484f" [ 2064.916121] env[61663]: _type = "Task" [ 2064.916121] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.924077] env[61663]: DEBUG oslo_vmware.api [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f569c1-b3d1-f605-4600-cf0f2be1484f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.969672] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2064.969897] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2064.970097] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Deleting the datastore file [datastore1] 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2064.970364] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-340f2428-ddd9-4418-82d1-2d0064d51428 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.986101] env[61663]: DEBUG oslo_vmware.api [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Waiting for the task: (returnval){ [ 2064.986101] env[61663]: value = "task-1690817" [ 2064.986101] env[61663]: _type = "Task" [ 2064.986101] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.997109] env[61663]: DEBUG oslo_vmware.api [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Task: {'id': task-1690817, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.430060] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2065.432967] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Creating directory with path [datastore1] vmware_temp/2dea3b11-63fc-4976-bfb8-dc0d96053639/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2065.432967] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71b696a3-6c4c-4fdb-b96e-d8e9f88778ba {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.445643] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Created directory with path [datastore1] vmware_temp/2dea3b11-63fc-4976-bfb8-dc0d96053639/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2065.446316] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Fetch image to [datastore1] vmware_temp/2dea3b11-63fc-4976-bfb8-dc0d96053639/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2065.447073] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/2dea3b11-63fc-4976-bfb8-dc0d96053639/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2065.448064] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df47fcb-0cf0-4d80-9369-b2367750d54d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.455868] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1f1019-b661-4cbf-80ce-49f7d45008eb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.469378] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02c3df8-e388-4aca-9bfa-4cd71c611a25 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.508528] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b206a90f-ca82-414f-91e6-53eef4bd89e6 {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.517447] env[61663]: DEBUG oslo_vmware.api [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Task: {'id': task-1690817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098593} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.519407] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2065.519754] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2065.520065] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2065.520364] env[61663]: INFO nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 2065.522504] env[61663]: DEBUG nova.compute.claims [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2065.523063] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2065.523063] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2065.525803] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4967ad32-109b-4b75-9b1f-2d2d83363d8c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.549018] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2065.757733] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2065.759362] env[61663]: ERROR nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2.
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last):
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] image = self._client.call(context, 2, 'get', args=(image_id,))
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] result = getattr(controller, method)(*args, **kwargs)
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._get(image_id)
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return RequestIdProxy(wrapped(*args, **kwargs))
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] resp, body = self.http_client.get(url, headers=header)
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self.request(url, 'GET', **kwargs)
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._handle_response(resp)
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise exc.from_response(resp, resp.content)
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required.
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] During handling of the above exception, another exception occurred:
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last):
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] yield resources
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self.driver.spawn(context, instance, image_meta,
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self._fetch_image_if_missing(context, vi)
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] image_fetch(context, vi, tmp_image_ds_loc)
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] images.fetch_image(
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image
[ 2065.759362] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] metadata = IMAGE_API.get(context, image_ref)
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return session.show(context, image_id,
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 287, in show
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] _reraise_translated_image_exception(image_id)
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise new_exc.with_traceback(exc_trace)
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 285, in show
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] image = self._client.call(context, 2, 'get', args=(image_id,))
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 191, in call
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] result = getattr(controller, method)(*args, **kwargs)
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._get(image_id)
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return RequestIdProxy(wrapped(*args, **kwargs))
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] resp, body = self.http_client.get(url, headers=header)
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self.request(url, 'GET', **kwargs)
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._handle_response(resp)
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise exc.from_response(resp, resp.content)
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2.
[ 2065.760413] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]
[ 2065.760413] env[61663]: INFO nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Terminating instance
[ 2065.761274] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2065.761482] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2065.762104] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2065.762661] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2065.762934] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2add0078-9c9e-486a-85f9-ddc1fc7d81ac {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.765854] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a231dd6b-dcd0-46dd-a801-8200959fe508 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.775980] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2065.776255] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c667480b-7574-4958-9bd4-e298a53869a3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.778538] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2065.778716] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2065.779685] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b97d387-2df5-43e4-bb02-35b172e8098e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.789064] env[61663]: DEBUG oslo_vmware.api [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Waiting for the task: (returnval){
[ 2065.789064] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c7b8c5-b3a3-a476-0c7f-d40d182e3e5d"
[ 2065.789064] env[61663]: _type = "Task"
[ 2065.789064] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2065.797587] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2065.797587] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Creating directory with path [datastore1] vmware_temp/3ab9dd93-52cd-4690-9f52-59560cae578e/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2065.797587] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1562c598-67a5-4c0a-8919-b40fd1f628b3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.818516] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Created directory with path [datastore1] vmware_temp/3ab9dd93-52cd-4690-9f52-59560cae578e/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2065.818651] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Fetch image to [datastore1] vmware_temp/3ab9dd93-52cd-4690-9f52-59560cae578e/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2065.822113] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/3ab9dd93-52cd-4690-9f52-59560cae578e/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2065.822113] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a73563a-997b-415e-b589-d148853983f5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.828162] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb0fb02-5d02-41b4-82d9-a6dedf6c6055 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.840853] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-632a5098-84cd-4b3e-914a-307b5407f510 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.848875] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2065.848875] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2065.848993] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Deleting the datastore file [datastore1] 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2065.849806] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-901e9b0c-149b-4190-b28e-a59864aad58a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.876937] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223b3e9f-608a-4516-9ee2-b9a0d3bc78e8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.880162] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e1ec65-c640-4edd-babb-31abf699f9f7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.885356] env[61663]: DEBUG oslo_vmware.api [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Waiting for the task: (returnval){
[ 2065.885356] env[61663]: value = "task-1690819"
[ 2065.885356] env[61663]: _type = "Task"
[ 2065.885356] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2065.892837] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-46b37710-6729-47e2-9f52-1fab90ae7178 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.898704] env[61663]: DEBUG oslo_vmware.api [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Task: {'id': task-1690819, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2065.901104] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c390c9f7-6578-4fd3-996a-3d5f366c2b06 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.935118] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c9bbe8-2c06-4e71-8bba-55b660a23785 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.938087] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2065.944576] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ecf7d4-3691-413c-8c51-0fcce739baab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2065.958419] env[61663]: DEBUG nova.compute.provider_tree [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2065.970841] env[61663]: DEBUG nova.scheduler.client.report [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2065.990154] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.467s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2065.990705] env[61663]: ERROR nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2065.990705] env[61663]: Faults: ['InvalidArgument']
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Traceback (most recent call last):
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] self.driver.spawn(context, instance, image_meta,
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] self._fetch_image_if_missing(context, vi)
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] image_cache(vi, tmp_image_ds_loc)
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] vm_util.copy_virtual_disk(
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] session._wait_for_task(vmdk_copy_task)
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] return self.wait_for_task(task_ref)
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] return evt.wait()
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] result = hub.switch()
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] return self.greenlet.switch()
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] self.f(*self.args, **self.kw)
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] raise exceptions.translate_fault(task_info.error)
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Faults: ['InvalidArgument']
[ 2065.990705] env[61663]: ERROR nova.compute.manager [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3]
[ 2065.991442] env[61663]: DEBUG nova.compute.utils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2065.994938] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Build of instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 was re-scheduled: A specified parameter was not correct: fileType
[ 2065.994938] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2065.996555] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2065.996555] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2065.996555] env[61663]: DEBUG nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2065.996555] env[61663]: DEBUG nova.network.neutron [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2066.116968] env[61663]: DEBUG oslo_vmware.rw_handles [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3ab9dd93-52cd-4690-9f52-59560cae578e/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2066.184020] env[61663]: DEBUG oslo_vmware.rw_handles [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2066.184020] env[61663]: DEBUG oslo_vmware.rw_handles [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3ab9dd93-52cd-4690-9f52-59560cae578e/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2066.396822] env[61663]: DEBUG oslo_vmware.api [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Task: {'id': task-1690819, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072172} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2066.396822] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2066.397021] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2066.397380] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2066.397380] env[61663]: INFO nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Took 0.64 seconds to destroy the instance on the hypervisor.
[ 2066.399449] env[61663]: DEBUG nova.compute.claims [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2066.399625] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2066.399868] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2066.754924] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beab9cb1-ce79-4f64-9c78-96d15c863678 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2066.762640] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab4abef-6bd8-4ff7-af8c-09ddc9854fce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2066.801046] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537975d3-6dca-41b4-8fac-1b1825192d04 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2066.808955] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9805567-00cc-40a7-a0d5-ef6ba012e7c0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2066.822776] env[61663]: DEBUG nova.compute.provider_tree [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2066.834717] env[61663]: DEBUG nova.scheduler.client.report [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2066.853290] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.453s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2066.854273] env[61663]: ERROR nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2.
[ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last): [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] result = getattr(controller, method)(*args, **kwargs) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._get(image_id) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] resp, body = self.http_client.get(url, headers=header) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self.request(url, 'GET', **kwargs) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._handle_response(resp) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise exc.from_response(resp, resp.content) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] During handling of the above exception, another exception occurred: [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last): [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self.driver.spawn(context, instance, image_meta, [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self._fetch_image_if_missing(context, vi) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] image_fetch(context, vi, tmp_image_ds_loc) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] images.fetch_image( [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] metadata = IMAGE_API.get(context, image_ref) [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2066.854273] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return session.show(context, image_id, [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] _reraise_translated_image_exception(image_id) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise new_exc.with_traceback(exc_trace) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 
689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] result = getattr(controller, method)(*args, **kwargs) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._get(image_id) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] resp, body = self.http_client.get(url, headers=header) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self.request(url, 'GET', **kwargs) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._handle_response(resp) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise exc.from_response(resp, resp.content) [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. [ 2066.855207] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2066.855207] env[61663]: DEBUG nova.compute.utils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. 
{{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2066.861802] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Build of instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 was re-scheduled: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2066.862314] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2066.862484] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2066.862643] env[61663]: DEBUG nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2066.862847] env[61663]: DEBUG nova.network.neutron [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2066.907761] env[61663]: DEBUG nova.network.neutron [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.925248] env[61663]: INFO nova.compute.manager [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Took 0.93 seconds to deallocate network for instance. [ 2067.039192] env[61663]: DEBUG neutronclient.v2_0.client [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61663) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2067.041191] env[61663]: ERROR nova.compute.manager [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last): [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] result = getattr(controller, method)(*args, **kwargs) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._get(image_id) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] resp, body = self.http_client.get(url, headers=header) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self.request(url, 'GET', **kwargs) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._handle_response(resp) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise exc.from_response(resp, resp.content) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] During handling of the above exception, another exception occurred: [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last): [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self.driver.spawn(context, instance, image_meta, [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self._fetch_image_if_missing(context, vi) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] image_fetch(context, vi, tmp_image_ds_loc) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] images.fetch_image( [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] metadata = IMAGE_API.get(context, image_ref) [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2067.041191] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return session.show(context, image_id, [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] _reraise_translated_image_exception(image_id) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise new_exc.with_traceback(exc_trace) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 
689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] result = getattr(controller, method)(*args, **kwargs) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._get(image_id) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] resp, body = self.http_client.get(url, headers=header) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self.request(url, 'GET', **kwargs) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self._handle_response(resp) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise exc.from_response(resp, resp.content) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. 
[ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] During handling of the above exception, another exception occurred: [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last): [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self._build_and_run_instance(context, instance, image, [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise exception.RescheduledException( [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] nova.exception.RescheduledException: Build of instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 was re-scheduled: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] During handling of the above exception, another exception occurred: [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last): [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] ret = obj(*args, **kwargs) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] exception_handler_v20(status_code, error_body) [ 2067.042208] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise client_exc(message=error_message, [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Neutron server returns request_ids: ['req-7294559d-8636-4ae6-a14f-679aba0d9319'] [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 
689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] During handling of the above exception, another exception occurred: [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last): [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self._deallocate_network(context, instance, requested_networks) [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self.network_api.deallocate_for_instance( [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] data = neutron.list_ports(**search_opts) [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] ret = obj(*args, **kwargs) [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self.list('ports', self.ports_path, retrieve_all, [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] ret = obj(*args, **kwargs) [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] for r in self._pagination(collection, path, **params): [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] res = self.get(path, params=params) [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] ret = obj(*args, **kwargs) [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 
689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self.retry_request("GET", action, body=body, [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] ret = obj(*args, **kwargs) [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] return self.do_request(method, action, body=body, [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] ret = obj(*args, **kwargs) [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] self._handle_fault_response(status_code, replybody, resp) [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] raise exception.Unauthorized() [ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] nova.exception.Unauthorized: Not authorized. 
[ 2067.043171] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] [ 2067.045245] env[61663]: INFO nova.scheduler.client.report [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Deleted allocations for instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 [ 2067.101049] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4a7c06b9-3728-45ba-ae5a-108e05bb09e3 tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 685.444s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.102808] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 488.765s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.104311] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Acquiring lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.104311] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.104311] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.106857] env[61663]: INFO nova.compute.manager [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Terminating instance [ 2067.109855] env[61663]: DEBUG nova.compute.manager [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2067.110190] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2067.111461] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-584636d1-59ee-40d1-b552-edc2cf278472 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.117612] env[61663]: DEBUG nova.compute.manager [None req-0be60762-7a40-4c6c-b95c-8e521424ddd8 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] [instance: 59d4580f-5897-42d6-82cb-0aead4d2658c] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2067.124580] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695b2fd9-33da-4a5b-95de-551d99cd2bf1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.140208] env[61663]: INFO nova.scheduler.client.report [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Deleted allocations for instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 [ 2067.149945] env[61663]: DEBUG nova.compute.manager [None req-0be60762-7a40-4c6c-b95c-8e521424ddd8 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] [instance: 59d4580f-5897-42d6-82cb-0aead4d2658c] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 2067.168803] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 47e1551e-ac80-4b4e-b568-3931c6dcf3b3 could not be found. [ 2067.169041] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2067.170054] env[61663]: INFO nova.compute.manager [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2067.170054] env[61663]: DEBUG oslo.service.loopingcall [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2067.170054] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f4ba32df-8f91-41f9-8759-971d2c8ea8b5 tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 637.061s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.170395] env[61663]: DEBUG nova.compute.manager [-] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2067.170514] env[61663]: DEBUG nova.network.neutron [-] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2067.173827] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 439.872s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.174292] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Acquiring lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.174292] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.174421] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.177453] env[61663]: INFO nova.compute.manager [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Terminating instance [ 2067.182564] env[61663]: DEBUG nova.compute.manager [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2067.182705] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2067.183260] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ffe93022-e60e-4420-9d50-4d832a13fd02 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.188495] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2067.198064] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c80705-c443-44af-8185-3c9e5c08a344 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.211914] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0be60762-7a40-4c6c-b95c-8e521424ddd8 tempest-AttachInterfacesTestJSON-1267786142 tempest-AttachInterfacesTestJSON-1267786142-project-member] Lock "59d4580f-5897-42d6-82cb-0aead4d2658c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 202.400s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.217723] env[61663]: DEBUG nova.network.neutron [-] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.229712] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7 could not be found. [ 2067.229911] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2067.230100] env[61663]: INFO nova.compute.manager [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2067.230347] env[61663]: DEBUG oslo.service.loopingcall [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2067.230722] env[61663]: INFO nova.compute.manager [-] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] Took 0.06 seconds to deallocate network for instance. [ 2067.230928] env[61663]: DEBUG nova.compute.manager [-] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2067.231042] env[61663]: DEBUG nova.network.neutron [-] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2067.240333] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2067.263975] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.263975] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.265329] env[61663]: INFO nova.compute.claims [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2067.329226] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.373591] env[61663]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61663) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2067.373591] env[61663]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token; please verify the Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2067.374386] env[61663]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-e7861071-533b-4977-8305-60397538ea19'] [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2067.374386] env[61663]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2067.374386] env[61663]: ERROR oslo.service.loopingcall [ 2067.377910] env[61663]: ERROR nova.compute.manager [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2067.382220] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0e43f8ae-3cca-4050-a395-f25865695c6a tempest-FloatingIPsAssociationTestJSON-263078227 tempest-FloatingIPsAssociationTestJSON-263078227-project-member] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.279s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2067.383243] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 278.297s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2067.383427] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 47e1551e-ac80-4b4e-b568-3931c6dcf3b3] During sync_power_state the instance has a pending task (deleting). Skip.
[ 2067.383602] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "47e1551e-ac80-4b4e-b568-3931c6dcf3b3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2067.419714] env[61663]: ERROR nova.compute.manager [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
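Annotation: the Lock "..." acquired / "released" DEBUG lines above (lockutils.py:402/407/421) are emitted by oslo.concurrency each time a decorated function takes a named lock, including how long the caller waited for it and how long it was held. A minimal sketch of the pattern; the lock name and function are illustrative only.

# Sketch of the oslo.concurrency usage behind the 'Lock "<uuid>"
# acquired/"released" ... waited/held Ns' lines; names illustrative.
from oslo_concurrency import lockutils

@lockutils.synchronized('47e1551e-ac80-4b4e-b568-3931c6dcf3b3')
def do_terminate_instance():
    # Runs with the named lock held; lockutils logs the wait and hold
    # durations seen above at DEBUG level.
    pass

# The same named lock is also available as a context manager:
with lockutils.lock('47e1551e-ac80-4b4e-b568-3931c6dcf3b3'):
    pass  # work done under the lock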
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last):
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     ret = obj(*args, **kwargs)
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     exception_handler_v20(status_code, error_body)
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     raise client_exc(message=error_message,
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Neutron server returns request_ids: ['req-e7861071-533b-4977-8305-60397538ea19']
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] During handling of the above exception, another exception occurred:
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Traceback (most recent call last):
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     self._delete_instance(context, instance, bdms)
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     self._shutdown_instance(context, instance, bdms)
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     self._try_deallocate_network(context, instance, requested_networks)
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     with excutils.save_and_reraise_exception():
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     self.force_reraise()
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     raise self.value
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     _deallocate_network_with_retries()
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     return evt.wait()
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     result = hub.switch()
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     return self.greenlet.switch()
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     result = func(*self.args, **self.kw)
[ 2067.419714] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     result = f(*args, **kwargs)
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     self._deallocate_network(
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     self.network_api.deallocate_for_instance(
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     data = neutron.list_ports(**search_opts)
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     ret = obj(*args, **kwargs)
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     return self.list('ports', self.ports_path, retrieve_all,
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     ret = obj(*args, **kwargs)
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     for r in self._pagination(collection, path, **params):
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     res = self.get(path, params=params)
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     ret = obj(*args, **kwargs)
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     return self.retry_request("GET", action, body=body,
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     ret = obj(*args, **kwargs)
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     return self.do_request(method, action, body=body,
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     ret = obj(*args, **kwargs)
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     self._handle_fault_response(status_code, replybody, resp)
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]   File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]     raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 2067.420660] env[61663]: ERROR nova.compute.manager [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7]
[ 2067.446849] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.273s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2067.454068] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 278.367s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2067.454344] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] During sync_power_state the instance has a pending task (deleting). Skip.
[ 2067.454535] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2067.505814] env[61663]: INFO nova.compute.manager [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] [instance: 689ea5bd-1ab7-4c57-9de0-2c2ff99e84e7] Successfully reverted task state from None on failure for instance.
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server [None req-ace605d5-bebb-4e96-bd07-e1cb8e13789c tempest-TenantUsagesTestJSON-1156964182 tempest-TenantUsagesTestJSON-1156964182-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     exception_handler_v20(status_code, error_body)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     raise client_exc(message=error_message,
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-e7861071-533b-4977-8305-60397538ea19']
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred:
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     res = self.dispatcher.dispatch(message)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     return self._do_dispatch(endpoint, method, ctxt, args)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     result = func(ctxt, **new_args)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     raise self.value
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     return f(self, context, *args, **kw)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     raise self.value
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     return function(self, context, *args, **kwargs)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     return function(self, context, *args, **kwargs)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     raise self.value
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     return function(self, context, *args, **kwargs)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     do_terminate_instance(instance, bdms)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     return f(*args, **kwargs)
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance
[ 2067.511315] env[61663]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     raise self.value
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     self._delete_instance(context, instance, bdms)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     self._shutdown_instance(context, instance, bdms)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     self._try_deallocate_network(context, instance, requested_networks)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     with excutils.save_and_reraise_exception():
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     self.force_reraise()
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     raise self.value
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     _deallocate_network_with_retries()
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     return evt.wait()
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     result = hub.switch()
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     return self.greenlet.switch()
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     result = func(*self.args, **self.kw)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     result = f(*args, **kwargs)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     self._deallocate_network(
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     self.network_api.deallocate_for_instance(
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     data = neutron.list_ports(**search_opts)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     return self.list('ports', self.ports_path, retrieve_all,
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     for r in self._pagination(collection, path, **params):
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     res = self.get(path, params=params)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     return self.retry_request("GET", action, body=body,
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     return self.do_request(method, action, body=body,
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 2067.512735] env[61663]: ERROR oslo_messaging.rpc.server     ret = obj(*args, **kwargs)
[ 2067.513945] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 2067.513945] env[61663]: ERROR oslo_messaging.rpc.server     self._handle_fault_response(status_code, replybody, resp)
[ 2067.513945] env[61663]: ERROR oslo_messaging.rpc.server   File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 2067.513945] env[61663]: ERROR oslo_messaging.rpc.server     raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 2067.513945] env[61663]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 2067.513945] env[61663]: ERROR oslo_messaging.rpc.server
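Annotation: the loopingcall.py frames in all three tracebacks above (_run_loop, _func, and the evt.wait() inside func) are oslo.service's RetryDecorator machinery, which Nova wraps around _deallocate_network_with_retries so that transient Neutron failures are retried before terminate gives up. The NeutronAdminCredentialConfigurationInvalid raised here is not in the retryable set, so it propagates out of evt.wait() on the first attempt. A rough, self-contained sketch of that shape; the retry parameters and the ConnectionError stand-in are illustrative, not Nova's exact values.

# Sketch of the oslo.service retry wrapper whose frames appear in the
# tracebacks above; parameters and exception types are illustrative.
from oslo_service import loopingcall

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=12,
                            exceptions=(ConnectionError,))
def _deallocate_network_with_retries():
    # Only exceptions listed in `exceptions` are retried with an
    # increasing sleep; anything else (like the 401-derived error in
    # this log) propagates immediately.
    deallocate_network()

def deallocate_network():
    pass  # placeholder for network_api.deallocate_for_instance(...)

_deallocate_network_with_retries()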
[ 2067.584978] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdee759f-766e-48ad-9120-8b92e7358c78 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2067.594875] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d940ba8c-4d44-4a41-9a23-f4e985d7d5d0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2067.624293] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ad6f05-4eea-4fe4-a552-6e90b1d9aa91 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2067.631905] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8220f4b1-1f07-4f37-a053-fd6fed13430a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2067.645276] env[61663]: DEBUG nova.compute.provider_tree [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2067.657361] env[61663]: DEBUG nova.scheduler.client.report [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2067.679100] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.415s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2067.679653] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 2067.685217] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.354s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2067.685217] env[61663]: INFO nova.compute.claims [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2067.736637] env[61663]: DEBUG nova.compute.utils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2067.736637] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 2067.736637] env[61663]: DEBUG nova.network.neutron [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 2067.746666] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 2067.821162] env[61663]: DEBUG nova.policy [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd14a4a142854732a9567efe1552ea82', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0c055b04d92c49cd90d470ad709f0181', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}}
[ 2067.826543] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 2067.864991] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=<?>,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:56:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 2067.865291] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 2067.865460] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2067.865653] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 2067.865841] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2067.865974] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 2067.866222] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 2067.866391] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 2067.866570] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 2067.866736] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 2067.866915] env[61663]: DEBUG nova.virt.hardware [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 2067.868097] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0471910-f81e-4934-9c7d-62ea9d1f68ab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2067.878570] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c44e27-fceb-4e11-b956-fa9d2a876084 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2067.989433] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc51ad8-9a9b-4e09-af8e-f7830baca919 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2067.997782] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34022d6-ee13-474f-b26f-c0351ee95b45 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2068.027915] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d9fe82-0b67-4b45-8a87-6a2c3e50bf4c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2068.035301] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e219ce7-6e32-483f-aa37-d1081108e20b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2068.048757] env[61663]: DEBUG nova.compute.provider_tree [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2068.059422] env[61663]: DEBUG nova.scheduler.client.report [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2068.080529] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.397s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2068.081087] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 2068.136129] env[61663]: DEBUG nova.compute.utils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2068.141030] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 2068.141030] env[61663]: DEBUG nova.network.neutron [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 2068.153041] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 2068.224662] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 2068.245990] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=<?>,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:56:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 2068.246269] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 2068.246428] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2068.246607] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 2068.246754] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2068.246903] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 2068.247314] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 2068.247613] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 2068.247679] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 2068.247845] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 2068.248092] env[61663]: DEBUG nova.virt.hardware [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 2068.248869] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d059b8c-6ea0-420e-992b-e00f73ca50e1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2068.253365] env[61663]: DEBUG nova.policy [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23af862ab660499ab02b71d7cbbe87a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '231acc431e92432795932c50511f2944', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}}
[ 2068.260076] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-407ed0bd-6495-48ff-8b53-eae22d513af9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2068.482512] env[61663]: DEBUG nova.network.neutron [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Successfully created port: 01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2069.266695] env[61663]: DEBUG nova.network.neutron [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Successfully created port: ac54de0b-5aeb-4758-bfd2-bf7830a05454 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2069.316164] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aca5ca74-d895-4661-8457-c008e4f214ac tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquiring lock "b58c2b47-6508-491c-ad2c-ac86e654c7ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2069.316400] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aca5ca74-d895-4661-8457-c008e4f214ac tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "b58c2b47-6508-491c-ad2c-ac86e654c7ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2070.289383] env[61663]: DEBUG nova.compute.manager [req-e446eed4-0633-4667-9867-f241dcd83ff3 req-77d3c096-4085-4266-af0e-7ccbfcdc1c09 service nova] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Received event network-vif-plugged-01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 2070.289625] env[61663]: DEBUG oslo_concurrency.lockutils [req-e446eed4-0633-4667-9867-f241dcd83ff3 req-77d3c096-4085-4266-af0e-7ccbfcdc1c09 service nova] Acquiring lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2070.289807] env[61663]: DEBUG oslo_concurrency.lockutils [req-e446eed4-0633-4667-9867-f241dcd83ff3 req-77d3c096-4085-4266-af0e-7ccbfcdc1c09 service nova] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2070.289978] env[61663]: DEBUG oslo_concurrency.lockutils [req-e446eed4-0633-4667-9867-f241dcd83ff3 req-77d3c096-4085-4266-af0e-7ccbfcdc1c09 service nova] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2070.290161] env[61663]: DEBUG nova.compute.manager [req-e446eed4-0633-4667-9867-f241dcd83ff3 req-77d3c096-4085-4266-af0e-7ccbfcdc1c09 service nova] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] No waiting events found dispatching network-vif-plugged-01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 2070.290325] env[61663]: WARNING nova.compute.manager [req-e446eed4-0633-4667-9867-f241dcd83ff3 req-77d3c096-4085-4266-af0e-7ccbfcdc1c09 service nova] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Received unexpected event network-vif-plugged-01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b for instance with vm_state building and task_state spawning.
[ 2070.410591] env[61663]: DEBUG nova.network.neutron [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Successfully updated port: 01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2070.421492] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "refresh_cache-525749ba-7de2-4ec5-8f7b-1f4c291710fa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2070.421642] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquired lock "refresh_cache-525749ba-7de2-4ec5-8f7b-1f4c291710fa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2070.421792] env[61663]: DEBUG nova.network.neutron [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2070.498756] env[61663]: DEBUG nova.network.neutron [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2070.904474] env[61663]: DEBUG nova.network.neutron [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Updating instance_info_cache with network_info: [{"id": "01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b", "address": "fa:16:3e:f7:35:64", "network": {"id": "8e6349a5-4006-4ac5-b128-127a67cc707e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-64124567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c055b04d92c49cd90d470ad709f0181", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01f8dc08-25", "ovs_interfaceid": "01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2070.915635] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Releasing lock "refresh_cache-525749ba-7de2-4ec5-8f7b-1f4c291710fa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
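Annotation: the pop_instance_event lines and the WARNING above show Nova's external-event handshake arriving early: Neutron sent network-vif-plugged before the spawning thread had registered a waiter for it, so the event is dispatched to nobody and logged as unexpected while the instance is still building/spawning. Below is a simplified, self-contained model of that handshake using threading.Event; Nova's real implementation is the InstanceEvents/ComputeVirtAPI machinery referenced in the log, not this code.

# Simplified model of the external-event handshake; not Nova's code.
import threading

waiters = {}  # (instance_uuid, event_name) -> threading.Event

def prepare_for_event(instance_uuid, event_name):
    evt = threading.Event()
    waiters[(instance_uuid, event_name)] = evt
    return evt

def deliver_event(instance_uuid, event_name):
    evt = waiters.pop((instance_uuid, event_name), None)
    if evt is None:
        # Mirrors the WARNING above: event arrived with no waiter yet.
        print("unexpected event %s for %s" % (event_name, instance_uuid))
    else:
        evt.set()

# Event arrives before anyone registered -> 'unexpected event':
deliver_event("525749ba", "network-vif-plugged-01f8dc08")

# Normal path: register first, then the delivery wakes the waiter.
evt = prepare_for_event("525749ba", "network-vif-plugged-01f8dc08")
deliver_event("525749ba", "network-vif-plugged-01f8dc08")
evt.wait(timeout=300)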
lock "refresh_cache-525749ba-7de2-4ec5-8f7b-1f4c291710fa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.915931] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Instance network_info: |[{"id": "01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b", "address": "fa:16:3e:f7:35:64", "network": {"id": "8e6349a5-4006-4ac5-b128-127a67cc707e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-64124567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c055b04d92c49cd90d470ad709f0181", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01f8dc08-25", "ovs_interfaceid": "01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2070.916360] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:35:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69d412f5-01a9-4fed-8488-7b767a13a653', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2070.924097] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Creating folder: Project (0c055b04d92c49cd90d470ad709f0181). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2070.924639] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9c30430-156a-43dc-b02e-e887ed09d2ad {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.936693] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Created folder: Project (0c055b04d92c49cd90d470ad709f0181) in parent group-v352575. [ 2070.936883] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Creating folder: Instances. Parent ref: group-v352658. 
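
Editor's note: the two Folder.CreateFolder invocations above build the per-tenant "Project (<tenant_id>)/Instances" hierarchy under the parent group before CreateVM_Task runs. A hedged sketch of that idempotent ensure-folder logic; find_child and create are hypothetical callables standing in for the vSphere folder lookups and CreateFolder calls the driver issues through oslo.vmware.

```python
# Sketch of the folder layout built above:
#   Project (<tenant_id>) / Instances
# find_child(parent_ref, name) -> folder ref or None
# create(parent_ref, name) -> new folder ref
# Both are assumed stand-ins, not real oslo.vmware APIs.


def ensure_instance_folder(find_child, create, root_ref, project_id):
    project_name = 'Project (%s)' % project_id
    # Mirrors "Creating folder: Project (...). Parent ref: group-v352575."
    project_ref = (find_child(root_ref, project_name)
                   or create(root_ref, project_name))
    # Mirrors "Creating folder: Instances. Parent ref: group-v352658."
    return (find_child(project_ref, 'Instances')
            or create(project_ref, 'Instances'))
```
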
{{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2070.937123] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8af2df5c-f3ce-4524-9803-5c261b57605f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.945542] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Created folder: Instances in parent group-v352658. [ 2070.945798] env[61663]: DEBUG oslo.service.loopingcall [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2070.945983] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2070.946316] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a427a8fd-aef6-4448-a53c-870d480d3e15 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.961414] env[61663]: DEBUG nova.network.neutron [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Successfully updated port: ac54de0b-5aeb-4758-bfd2-bf7830a05454 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2070.968109] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2070.968109] env[61663]: value = "task-1690822" [ 2070.968109] env[61663]: _type = "Task" [ 2070.968109] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2070.972677] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "refresh_cache-740f7887-4a5c-4889-9635-e9d9c6607ee7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2070.972805] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired lock "refresh_cache-740f7887-4a5c-4889-9635-e9d9c6607ee7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2070.972954] env[61663]: DEBUG nova.network.neutron [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2070.980428] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690822, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.032192] env[61663]: DEBUG nova.network.neutron [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2071.479586] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690822, 'name': CreateVM_Task, 'duration_secs': 0.29668} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.481894] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2071.482530] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2071.482696] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2071.483019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2071.483568] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdad1cd5-9505-43b4-a7b3-d4967b75981d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.488062] env[61663]: DEBUG oslo_vmware.api [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Waiting for the task: (returnval){ [ 2071.488062] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c9e128-0b70-52a7-9814-34f97ff3878e" [ 2071.488062] env[61663]: _type = "Task" [ 2071.488062] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.495515] env[61663]: DEBUG oslo_vmware.api [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c9e128-0b70-52a7-9814-34f97ff3878e, 'name': SearchDatastore_Task} progress is 0%. 
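
Editor's note: the "[datastore1] devstack-image-cache_base/..." lock plus the HostDatastoreBrowser.SearchDatastore_Task above is the per-image cache check: serialize on the cache path, probe the datastore for an already-cached VMDK, and only fetch from the image service on a miss. A sketch under stated assumptions; search_datastore and fetch_image_to_cache are hypothetical helpers, and only the lock-name pattern is taken from the records.

```python
from oslo_concurrency import lockutils


def get_cached_image(session, datastore, cache_dir, image_id,
                     search_datastore, fetch_image_to_cache):
    """Return the datastore path of a cached image VMDK.

    search_datastore/fetch_image_to_cache stand in for
    SearchDatastore_Task and the image download. The lock name matches
    the "[datastore1] devstack-image-cache_base/..." locks above.
    """
    cache_path = '[%s] %s/%s' % (datastore, cache_dir, image_id)
    with lockutils.lock(cache_path):
        if not search_datastore(session, cache_path,
                                '%s.vmdk' % image_id):
            # Cache miss: download while holding the lock so
            # concurrent builds of the same image don't race.
            fetch_image_to_cache(session, image_id, cache_path)
    return '%s/%s.vmdk' % (cache_path, image_id)
```
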
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.554321] env[61663]: DEBUG nova.network.neutron [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Updating instance_info_cache with network_info: [{"id": "ac54de0b-5aeb-4758-bfd2-bf7830a05454", "address": "fa:16:3e:1e:9a:e1", "network": {"id": "c74991f7-41c9-42d7-9978-5fba7e2b62af", "bridge": "br-int", "label": "tempest-ServersTestJSON-1185630305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "231acc431e92432795932c50511f2944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac54de0b-5a", "ovs_interfaceid": "ac54de0b-5aeb-4758-bfd2-bf7830a05454", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2071.567881] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Releasing lock "refresh_cache-740f7887-4a5c-4889-9635-e9d9c6607ee7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2071.567881] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Instance network_info: |[{"id": "ac54de0b-5aeb-4758-bfd2-bf7830a05454", "address": "fa:16:3e:1e:9a:e1", "network": {"id": "c74991f7-41c9-42d7-9978-5fba7e2b62af", "bridge": "br-int", "label": "tempest-ServersTestJSON-1185630305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "231acc431e92432795932c50511f2944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac54de0b-5a", "ovs_interfaceid": "ac54de0b-5aeb-4758-bfd2-bf7830a05454", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 
2071.567881] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:9a:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac54de0b-5aeb-4758-bfd2-bf7830a05454', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2071.573212] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Creating folder: Project (231acc431e92432795932c50511f2944). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2071.573727] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-261ffa3e-6305-4a21-a3e0-841cbaaf4d2f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.584180] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Created folder: Project (231acc431e92432795932c50511f2944) in parent group-v352575. [ 2071.584366] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Creating folder: Instances. Parent ref: group-v352661. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2071.584577] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ce5f245-20dc-442f-a8da-60b76b0cff8b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.595015] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Created folder: Instances in parent group-v352661. [ 2071.595116] env[61663]: DEBUG oslo.service.loopingcall [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2071.595276] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2071.595462] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fd25e1e-a4ce-4854-87b9-2f6d36c96b03 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.614355] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2071.614355] env[61663]: value = "task-1690825" [ 2071.614355] env[61663]: _type = "Task" [ 2071.614355] env[61663]: } to complete. 
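
Editor's note: the multi-line "Waiting for the task: (returnval){ value = "task-1690825" ... }" block, followed by "progress is 0%" and later "completed successfully", is oslo.vmware's task poll loop: the task reference is polled on a fixed interval until it reaches a terminal state. A minimal stand-alone sketch of such a loop; read_task_state is an assumed callable, and oslo.vmware itself drives this differently internally.

```python
import time


def wait_for_task(read_task_state, task_ref, poll_interval=0.5,
                  timeout=300.0):
    """Poll a vSphere task until it completes.

    read_task_state is a hypothetical callable returning
    (state, progress, error) for the task reference, where state is
    one of 'queued', 'running', 'success', 'error'.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = read_task_state(task_ref)
        if state == 'success':
            return progress  # e.g. the CreateVM_Task result above
        if state == 'error':
            raise RuntimeError('task %s failed: %s' % (task_ref, error))
        time.sleep(poll_interval)
    raise TimeoutError('task %s did not finish in %ss'
                       % (task_ref, timeout))
```
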
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.620234] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690825, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.998736] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2071.999017] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2071.999257] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.123070] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690825, 'name': CreateVM_Task, 'duration_secs': 0.28995} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.123245] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2072.123891] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.124064] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.124366] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2072.124660] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c53bbb8-393c-4d38-83d2-ed71c60c7195 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.128838] env[61663]: DEBUG oslo_vmware.api [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 
tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for the task: (returnval){ [ 2072.128838] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522f4c11-bd86-a841-53eb-736c24932055" [ 2072.128838] env[61663]: _type = "Task" [ 2072.128838] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.135984] env[61663]: DEBUG oslo_vmware.api [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522f4c11-bd86-a841-53eb-736c24932055, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.322832] env[61663]: DEBUG nova.compute.manager [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Received event network-changed-01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2072.323211] env[61663]: DEBUG nova.compute.manager [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Refreshing instance network info cache due to event network-changed-01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2072.323462] env[61663]: DEBUG oslo_concurrency.lockutils [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] Acquiring lock "refresh_cache-525749ba-7de2-4ec5-8f7b-1f4c291710fa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.323614] env[61663]: DEBUG oslo_concurrency.lockutils [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] Acquired lock "refresh_cache-525749ba-7de2-4ec5-8f7b-1f4c291710fa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.323782] env[61663]: DEBUG nova.network.neutron [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Refreshing network info cache for port 01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2072.638788] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2072.639087] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2072.639259] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 
tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.673293] env[61663]: DEBUG nova.network.neutron [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Updated VIF entry in instance network info cache for port 01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2072.673655] env[61663]: DEBUG nova.network.neutron [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Updating instance_info_cache with network_info: [{"id": "01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b", "address": "fa:16:3e:f7:35:64", "network": {"id": "8e6349a5-4006-4ac5-b128-127a67cc707e", "bridge": "br-int", "label": "tempest-ImagesTestJSON-64124567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0c055b04d92c49cd90d470ad709f0181", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69d412f5-01a9-4fed-8488-7b767a13a653", "external-id": "nsx-vlan-transportzone-444", "segmentation_id": 444, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01f8dc08-25", "ovs_interfaceid": "01f8dc08-25f3-4ea3-a9ba-6283c3d6a81b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2072.685364] env[61663]: DEBUG oslo_concurrency.lockutils [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] Releasing lock "refresh_cache-525749ba-7de2-4ec5-8f7b-1f4c291710fa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2072.685650] env[61663]: DEBUG nova.compute.manager [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Received event network-vif-plugged-ac54de0b-5aeb-4758-bfd2-bf7830a05454 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2072.685854] env[61663]: DEBUG oslo_concurrency.lockutils [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] Acquiring lock "740f7887-4a5c-4889-9635-e9d9c6607ee7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.686105] env[61663]: DEBUG oslo_concurrency.lockutils [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.686284] env[61663]: DEBUG oslo_concurrency.lockutils [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.686452] env[61663]: DEBUG nova.compute.manager [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] No waiting events found dispatching network-vif-plugged-ac54de0b-5aeb-4758-bfd2-bf7830a05454 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2072.686621] env[61663]: WARNING nova.compute.manager [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Received unexpected event network-vif-plugged-ac54de0b-5aeb-4758-bfd2-bf7830a05454 for instance with vm_state building and task_state spawning. [ 2072.686789] env[61663]: DEBUG nova.compute.manager [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Received event network-changed-ac54de0b-5aeb-4758-bfd2-bf7830a05454 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2072.686947] env[61663]: DEBUG nova.compute.manager [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Refreshing instance network info cache due to event network-changed-ac54de0b-5aeb-4758-bfd2-bf7830a05454. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2072.687146] env[61663]: DEBUG oslo_concurrency.lockutils [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] Acquiring lock "refresh_cache-740f7887-4a5c-4889-9635-e9d9c6607ee7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.687286] env[61663]: DEBUG oslo_concurrency.lockutils [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] Acquired lock "refresh_cache-740f7887-4a5c-4889-9635-e9d9c6607ee7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.687444] env[61663]: DEBUG nova.network.neutron [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Refreshing network info cache for port ac54de0b-5aeb-4758-bfd2-bf7830a05454 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2073.029063] env[61663]: DEBUG nova.network.neutron [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Updated VIF entry in instance network info cache for port ac54de0b-5aeb-4758-bfd2-bf7830a05454. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2073.029434] env[61663]: DEBUG nova.network.neutron [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Updating instance_info_cache with network_info: [{"id": "ac54de0b-5aeb-4758-bfd2-bf7830a05454", "address": "fa:16:3e:1e:9a:e1", "network": {"id": "c74991f7-41c9-42d7-9978-5fba7e2b62af", "bridge": "br-int", "label": "tempest-ServersTestJSON-1185630305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "231acc431e92432795932c50511f2944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac54de0b-5a", "ovs_interfaceid": "ac54de0b-5aeb-4758-bfd2-bf7830a05454", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2073.039630] env[61663]: DEBUG oslo_concurrency.lockutils [req-9bf11dd3-58ca-4026-95c7-9c1f02ed49ad req-5c9b7286-deee-45e5-8eae-ef7000de88df service nova] Releasing lock "refresh_cache-740f7887-4a5c-4889-9635-e9d9c6607ee7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2075.692169] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.692572] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2079.688762] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.387342] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.691591] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.691776] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2081.691900] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2081.714386] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.714563] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.714699] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.714827] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.714950] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.715137] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.715269] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.715472] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.715639] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Skipping network cache update for instance because it is Building. 
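
Editor's note: between builds, the req-d39473fe thread is oslo.service's periodic task runner. _heal_instance_info_cache walks the instance list and skips everything still in the Building state, which is why every instance above logs "Skipping network cache update". A declaration-level sketch of how such tasks are wired up with oslo.service; the spacing value and helper plumbing are assumptions, not Nova's configuration.

```python
from oslo_service import periodic_task


class ComputeManagerSketch(periodic_task.PeriodicTasks):
    """Illustrative only: how periodic tasks like the ones above
    (_heal_instance_info_cache, _poll_volume_usage, ...) are declared.
    Not Nova's real compute manager.
    """

    def __init__(self, conf, instances):
        super().__init__(conf)
        self.instances = instances  # stand-in for the instance DB query

    @periodic_task.periodic_task(spacing=60)  # spacing value is assumed
    def _heal_instance_info_cache(self, context):
        for instance in self.instances:
            if instance.get('vm_state') == 'building':
                # Matches "Skipping network cache update for instance
                # because it is Building." in the records above.
                continue
            # A real implementation would rebuild the network info
            # cache for this instance here.
```
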
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.715764] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2081.715885] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2081.716408] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.692250] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.692568] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.703594] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.703827] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2082.704014] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2082.704180] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2082.705586] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c8e800-a742-4285-8d23-95b726d1039b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.714514] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc4dc1c-6817-4b3d-aa32-e0d793596810 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.729412] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c9d046-e48d-449b-ad52-576a264acb0d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.735669] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530b729d-8f87-4408-b010-bf17571c2dde {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.764331] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181309MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2082.764486] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.764683] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2082.902878] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.903052] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.903186] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.903318] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.903466] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.903613] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 440175fc-da0c-4ea3-9a74-46e97e32658b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.903734] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.903850] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.903963] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.904090] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2082.915289] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 44a9bf55-1c16-49aa-a61f-611696fb2c54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2082.925557] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2082.934301] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2082.943496] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 12ecd7c0-dcb3-42f6-8560-c239f786254c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2082.952055] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2082.961243] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b58c2b47-6508-491c-ad2c-ac86e654c7ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2082.961466] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2082.961613] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2082.977968] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing inventories for resource provider b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2082.992482] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating ProviderTree inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2082.992672] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2083.003519] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing aggregate associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, aggregates: None {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2083.021147] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing trait associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2083.193079] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde6660d-c23a-4ef3-81b6-726d7475a316 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.200669] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6345ee42-db91-461c-ae6a-d8f6486bc78c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.230901] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a84f3eb-d7f8-4fa5-8e2c-b4cde8ea502e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.237804] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35f44964-afc4-4654-a5ac-8fae07b78bd9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.250305] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2083.260152] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2083.274591] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2083.274783] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.510s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.275128] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.275128] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2083.286893] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] There are 1 instances to clean {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2083.287168] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b583b039-84c7-4168-91a1-82821c0001a3] Instance has had 0 of 5 cleanup attempts {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11232}} [ 2084.321754] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2085.692123] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2086.700872] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2086.701182] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances with incomplete migration {{(pid=61663) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2087.700129] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2087.700341] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
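
Editor's note: the resource-tracker audit above is internally consistent and worth a quick check. The final view reports used_ram=1792MB with ten allocations of 128MB each plus the 512MB reservation from the inventory, and the inventory's allocation ratios govern schedulable capacity (as I understand placement's semantics, capacity is roughly (total - reserved) * allocation_ratio). A small worked verification using only numbers from the records:

```python
# Worked check of the resource-tracker numbers reported above.
instances = 10          # "total allocated vcpus: 10", one VCPU each
flavor_ram_mb = 128     # each allocation: {'MEMORY_MB': 128, ...}
reserved_ram_mb = 512   # MEMORY_MB 'reserved' in the inventory

used_ram_mb = reserved_ram_mb + instances * flavor_ram_mb
assert used_ram_mb == 1792  # matches "used_ram=1792MB"

# Schedulable capacity after allocation ratios, assuming placement's
# capacity = (total - reserved) * allocation_ratio.
vcpu_capacity = (48 - 0) * 4.0          # VCPU inventory above
ram_capacity = (196590 - 512) * 1.0     # MEMORY_MB inventory above
assert vcpu_capacity == 192.0
assert ram_capacity == 196078.0
```
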
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2096.243006] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "740f7887-4a5c-4889-9635-e9d9c6607ee7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2107.582465] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquiring lock "c21a5af5-004b-4544-bcf0-f105d6f336c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2107.582786] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.757187] env[61663]: WARNING oslo_vmware.rw_handles [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2115.757187] env[61663]: ERROR oslo_vmware.rw_handles [ 2115.758155] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/3ab9dd93-52cd-4690-9f52-59560cae578e/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2115.759999] env[61663]: DEBUG nova.virt.vmwareapi.vmops 
[None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2115.760281] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Copying Virtual Disk [datastore1] vmware_temp/3ab9dd93-52cd-4690-9f52-59560cae578e/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/3ab9dd93-52cd-4690-9f52-59560cae578e/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2115.760591] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c9443f0-4a31-4a79-b135-1f002b1f5fe2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.768492] env[61663]: DEBUG oslo_vmware.api [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Waiting for the task: (returnval){ [ 2115.768492] env[61663]: value = "task-1690826" [ 2115.768492] env[61663]: _type = "Task" [ 2115.768492] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.776333] env[61663]: DEBUG oslo_vmware.api [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Task: {'id': task-1690826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.279152] env[61663]: DEBUG oslo_vmware.exceptions [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2116.279465] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.280066] env[61663]: ERROR nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2116.280066] env[61663]: Faults: ['InvalidArgument'] [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Traceback (most recent call last): [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] yield resources [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] self.driver.spawn(context, instance, image_meta, [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] self._fetch_image_if_missing(context, vi) [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] image_cache(vi, tmp_image_ds_loc) [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] vm_util.copy_virtual_disk( [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] session._wait_for_task(vmdk_copy_task) [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] return self.wait_for_task(task_ref) [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] return evt.wait() [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] result = hub.switch() [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] return self.greenlet.switch() [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] self.f(*self.args, **self.kw) [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] raise exceptions.translate_fault(task_info.error) [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Faults: ['InvalidArgument'] [ 2116.280066] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] [ 2116.281537] env[61663]: INFO nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Terminating instance [ 2116.281989] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.282223] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2116.282459] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ac7b170-aea2-424f-84ac-d686dbc7f3d2 {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.284706] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2116.284921] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2116.285649] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af00fcb6-d6c7-470a-8f85-96a4419ac8f5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.293540] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2116.293750] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20a55b13-6c0d-4990-ad9c-f8701dc58412 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.295922] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2116.296107] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2116.297023] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d3213de-84a4-4514-bae0-5aaa30982048 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.301368] env[61663]: DEBUG oslo_vmware.api [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Waiting for the task: (returnval){ [ 2116.301368] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c7f8e5-f7cb-066d-9cdc-3dac7d10630c" [ 2116.301368] env[61663]: _type = "Task" [ 2116.301368] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.308367] env[61663]: DEBUG oslo_vmware.api [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c7f8e5-f7cb-066d-9cdc-3dac7d10630c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.364841] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2116.365103] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2116.365295] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Deleting the datastore file [datastore1] ef8528db-1338-4af6-9d4a-5eda7fe69a98 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2116.365571] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b4fee200-3d47-48d3-9efc-0cff4f84513b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.371682] env[61663]: DEBUG oslo_vmware.api [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Waiting for the task: (returnval){ [ 2116.371682] env[61663]: value = "task-1690828" [ 2116.371682] env[61663]: _type = "Task" [ 2116.371682] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.379265] env[61663]: DEBUG oslo_vmware.api [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Task: {'id': task-1690828, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.812054] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2116.812399] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Creating directory with path [datastore1] vmware_temp/8f5178c5-29ad-4a6f-b021-ea36f2aba118/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2116.812512] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-856f368e-7202-4cd2-b317-b6b7f57a3633 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.823701] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Created directory with path [datastore1] vmware_temp/8f5178c5-29ad-4a6f-b021-ea36f2aba118/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2116.823886] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Fetch image to [datastore1] vmware_temp/8f5178c5-29ad-4a6f-b021-ea36f2aba118/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2116.824069] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/8f5178c5-29ad-4a6f-b021-ea36f2aba118/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2116.824782] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac8f650-039c-4c1b-8a16-73036955b256 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.830776] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739de59b-7502-4288-9844-a3f09a93b189 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.839461] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d652124-54eb-4b0c-8a24-6d66dd098029 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.868576] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43d3ddc-2769-45bf-97c6-029ece17c80e {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.876391] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-947b02f3-c9ac-449b-8281-d96ee0921eea {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.880377] env[61663]: DEBUG oslo_vmware.api [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Task: {'id': task-1690828, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073933} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.880883] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2116.881097] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2116.881286] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2116.881468] env[61663]: INFO nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2116.883528] env[61663]: DEBUG nova.compute.claims [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2116.883688] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2116.883903] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2116.900518] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2116.949768] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8f5178c5-29ad-4a6f-b021-ea36f2aba118/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2117.013085] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2117.013293] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8f5178c5-29ad-4a6f-b021-ea36f2aba118/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2117.159825] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6955a1cf-3a69-4b6f-87e5-3bcaddb0a2ce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.168116] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63db5a0f-5f85-4241-8de7-fa755ec027cf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.196656] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92624785-f219-4b10-b885-c1061407e4f1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.203228] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2118eab-ea30-418d-a3fe-56e80f6388a1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.215999] env[61663]: DEBUG nova.compute.provider_tree [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2117.226535] env[61663]: DEBUG nova.scheduler.client.report [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2117.239779] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.356s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.240303] env[61663]: ERROR nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2117.240303] env[61663]: Faults: ['InvalidArgument'] [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Traceback (most recent call last): [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2117.240303] env[61663]: ERROR 
nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] self.driver.spawn(context, instance, image_meta, [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] self._fetch_image_if_missing(context, vi) [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] image_cache(vi, tmp_image_ds_loc) [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] vm_util.copy_virtual_disk( [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] session._wait_for_task(vmdk_copy_task) [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] return self.wait_for_task(task_ref) [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] return evt.wait() [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] result = hub.switch() [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] return self.greenlet.switch() [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] self.f(*self.args, **self.kw) [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] raise exceptions.translate_fault(task_info.error) [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Faults: ['InvalidArgument'] [ 2117.240303] env[61663]: ERROR nova.compute.manager [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] [ 2117.241024] env[61663]: DEBUG nova.compute.utils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2117.242436] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Build of instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 was re-scheduled: A specified parameter was not correct: fileType [ 2117.242436] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2117.242841] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2117.243038] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2117.243236] env[61663]: DEBUG nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2117.243407] env[61663]: DEBUG nova.network.neutron [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2117.686102] env[61663]: DEBUG nova.network.neutron [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2117.704018] env[61663]: INFO nova.compute.manager [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Took 0.46 seconds to deallocate network for instance. [ 2117.815021] env[61663]: INFO nova.scheduler.client.report [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Deleted allocations for instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 [ 2117.843172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a60eff62-ebb6-40e1-9b3f-09b77259b147 tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 644.390s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.843172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 446.285s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.843172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.843172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.843172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.846114] env[61663]: INFO nova.compute.manager [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Terminating instance [ 2117.848637] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquiring lock "refresh_cache-ef8528db-1338-4af6-9d4a-5eda7fe69a98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.848929] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Acquired lock "refresh_cache-ef8528db-1338-4af6-9d4a-5eda7fe69a98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.849238] env[61663]: DEBUG nova.network.neutron [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2117.861830] env[61663]: DEBUG nova.compute.manager [None req-6a07f348-dfc4-476f-8ba4-5234b0855129 tempest-ServerMetadataNegativeTestJSON-1947257258 tempest-ServerMetadataNegativeTestJSON-1947257258-project-member] [instance: 44a9bf55-1c16-49aa-a61f-611696fb2c54] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2117.892273] env[61663]: DEBUG nova.compute.manager [None req-6a07f348-dfc4-476f-8ba4-5234b0855129 tempest-ServerMetadataNegativeTestJSON-1947257258 tempest-ServerMetadataNegativeTestJSON-1947257258-project-member] [instance: 44a9bf55-1c16-49aa-a61f-611696fb2c54] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 2117.894486] env[61663]: DEBUG nova.network.neutron [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2117.917110] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6a07f348-dfc4-476f-8ba4-5234b0855129 tempest-ServerMetadataNegativeTestJSON-1947257258 tempest-ServerMetadataNegativeTestJSON-1947257258-project-member] Lock "44a9bf55-1c16-49aa-a61f-611696fb2c54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.142s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.927757] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2117.980028] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.980310] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.981836] env[61663]: INFO nova.compute.claims [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2118.037761] env[61663]: DEBUG nova.network.neutron [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.045654] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Releasing lock "refresh_cache-ef8528db-1338-4af6-9d4a-5eda7fe69a98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.046047] env[61663]: DEBUG nova.compute.manager [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2118.046250] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2118.049045] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83282df5-32d5-436a-8885-e8afcb7563c7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.058675] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7172d427-a0eb-418d-ac1a-705878289d31 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.089220] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef8528db-1338-4af6-9d4a-5eda7fe69a98 could not be found. [ 2118.089483] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2118.089706] env[61663]: INFO nova.compute.manager [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2118.089992] env[61663]: DEBUG oslo.service.loopingcall [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2118.092689] env[61663]: DEBUG nova.compute.manager [-] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2118.093074] env[61663]: DEBUG nova.network.neutron [-] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2118.111598] env[61663]: DEBUG nova.network.neutron [-] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2118.120024] env[61663]: DEBUG nova.network.neutron [-] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.129810] env[61663]: INFO nova.compute.manager [-] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] Took 0.04 seconds to deallocate network for instance. 
[ 2118.227626] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5ccab1f7-fbeb-4ca3-ae2b-247ff360ca4a tempest-MultipleCreateTestJSON-691398207 tempest-MultipleCreateTestJSON-691398207-project-member] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.386s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.228922] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 329.142s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2118.229081] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ef8528db-1338-4af6-9d4a-5eda7fe69a98] During sync_power_state the instance has a pending task (deleting). Skip. [ 2118.229172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "ef8528db-1338-4af6-9d4a-5eda7fe69a98" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.239880] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b9195c-f098-44f2-b95f-8cba95e5095a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.247944] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28afc97b-8df1-4efb-9515-0e8359d43c35 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.278203] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc54726-f652-4760-806d-ca0d052d7edc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.285336] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f667ded-fb96-48d9-aacd-fa5f811485fd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.298010] env[61663]: DEBUG nova.compute.provider_tree [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2118.307444] env[61663]: DEBUG nova.scheduler.client.report [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2118.319864] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.340s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2118.320347] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2118.353516] env[61663]: DEBUG nova.compute.utils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2118.354935] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2118.355090] env[61663]: DEBUG nova.network.neutron [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2118.362827] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2118.418721] env[61663]: DEBUG nova.policy [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e32a3ed536f140a2b10532295b389127', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab3cef71bf2e45d6b45c5a77b2b13670', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2118.448014] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2118.473297] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2118.473602] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2118.473774] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2118.473961] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2118.474125] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2118.474578] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2118.474578] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2118.474655] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2118.474818] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2118.475085] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2118.475165] env[61663]: DEBUG nova.virt.hardware [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2118.475996] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee7d2bb-3da3-4441-8994-ddccf592dc09 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.483886] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a81d16-32d7-4075-b89a-221a8a445767 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.916300] env[61663]: DEBUG nova.network.neutron [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Successfully created port: f47ebbd9-5144-437d-a6ba-4ea5ea47d21a {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2119.771305] env[61663]: DEBUG nova.network.neutron [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Successfully updated port: f47ebbd9-5144-437d-a6ba-4ea5ea47d21a {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2119.788023] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "refresh_cache-54a78c20-cbf6-453b-88e4-2fb4da0a6200" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2119.788188] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquired lock "refresh_cache-54a78c20-cbf6-453b-88e4-2fb4da0a6200" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2119.788338] env[61663]: DEBUG nova.network.neutron [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Building network info cache for instance {{(pid=61663) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 2119.795226] env[61663]: DEBUG nova.compute.manager [req-1e435ec7-9c64-4f68-a763-dd5e7e6435bb req-ef231edb-6623-4583-8f55-b74e6461bb94 service nova] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Received event network-vif-plugged-f47ebbd9-5144-437d-a6ba-4ea5ea47d21a {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2119.795437] env[61663]: DEBUG oslo_concurrency.lockutils [req-1e435ec7-9c64-4f68-a763-dd5e7e6435bb req-ef231edb-6623-4583-8f55-b74e6461bb94 service nova] Acquiring lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.795687] env[61663]: DEBUG oslo_concurrency.lockutils [req-1e435ec7-9c64-4f68-a763-dd5e7e6435bb req-ef231edb-6623-4583-8f55-b74e6461bb94 service nova] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.795800] env[61663]: DEBUG oslo_concurrency.lockutils [req-1e435ec7-9c64-4f68-a763-dd5e7e6435bb req-ef231edb-6623-4583-8f55-b74e6461bb94 service nova] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.799215] env[61663]: DEBUG nova.compute.manager [req-1e435ec7-9c64-4f68-a763-dd5e7e6435bb req-ef231edb-6623-4583-8f55-b74e6461bb94 service nova] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] No waiting events found dispatching network-vif-plugged-f47ebbd9-5144-437d-a6ba-4ea5ea47d21a {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2119.799520] env[61663]: WARNING nova.compute.manager [req-1e435ec7-9c64-4f68-a763-dd5e7e6435bb req-ef231edb-6623-4583-8f55-b74e6461bb94 service nova] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Received unexpected event network-vif-plugged-f47ebbd9-5144-437d-a6ba-4ea5ea47d21a for instance with vm_state building and task_state spawning. [ 2120.075646] env[61663]: DEBUG nova.network.neutron [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2120.415664] env[61663]: DEBUG nova.network.neutron [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Updating instance_info_cache with network_info: [{"id": "f47ebbd9-5144-437d-a6ba-4ea5ea47d21a", "address": "fa:16:3e:0b:10:42", "network": {"id": "0bf1355e-3265-4cdb-9b3e-f1e1876f43b6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1573123323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab3cef71bf2e45d6b45c5a77b2b13670", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf47ebbd9-51", "ovs_interfaceid": "f47ebbd9-5144-437d-a6ba-4ea5ea47d21a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2120.426811] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Releasing lock "refresh_cache-54a78c20-cbf6-453b-88e4-2fb4da0a6200" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.427118] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Instance network_info: |[{"id": "f47ebbd9-5144-437d-a6ba-4ea5ea47d21a", "address": "fa:16:3e:0b:10:42", "network": {"id": "0bf1355e-3265-4cdb-9b3e-f1e1876f43b6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1573123323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab3cef71bf2e45d6b45c5a77b2b13670", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf47ebbd9-51", "ovs_interfaceid": "f47ebbd9-5144-437d-a6ba-4ea5ea47d21a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2120.427531] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:10:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f47ebbd9-5144-437d-a6ba-4ea5ea47d21a', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2120.435112] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Creating folder: Project (ab3cef71bf2e45d6b45c5a77b2b13670). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2120.435737] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-756cc8e6-24da-49c9-8135-58a829e40e4f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.448132] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Created folder: Project (ab3cef71bf2e45d6b45c5a77b2b13670) in parent group-v352575. [ 2120.448324] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Creating folder: Instances. Parent ref: group-v352664. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2120.448548] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce3d740a-5ff6-4a8c-8f2d-7d860c784166 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.458825] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Created folder: Instances in parent group-v352664. [ 2120.459066] env[61663]: DEBUG oslo.service.loopingcall [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2120.459249] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 2120.459442] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e6cf7ac-e6a7-4cc9-b976-051538710323 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2120.477716] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2120.477716] env[61663]: value = "task-1690831"
[ 2120.477716] env[61663]: _type = "Task"
[ 2120.477716] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2120.484725] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690831, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2120.987884] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690831, 'name': CreateVM_Task, 'duration_secs': 0.267553} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2120.988063] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 2120.988691] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2120.988858] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2120.989235] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2120.989478] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c55a0810-d87b-4324-825d-64e033fb69b8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2120.993807] env[61663]: DEBUG oslo_vmware.api [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for the task: (returnval){
[ 2120.993807] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521e434a-2a97-989f-7189-84a4333f56d0"
[ 2120.993807] env[61663]: _type = "Task"
[ 2120.993807] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2121.002482] env[61663]: DEBUG oslo_vmware.api [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521e434a-2a97-989f-7189-84a4333f56d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2121.504830] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2121.505219] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2121.505345] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2121.821317] env[61663]: DEBUG nova.compute.manager [req-945810c7-8e40-462e-abd7-ff5a884261dc req-0bc8c27a-e291-4d1e-88db-0f20e1a1f531 service nova] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Received event network-changed-f47ebbd9-5144-437d-a6ba-4ea5ea47d21a {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 2121.821567] env[61663]: DEBUG nova.compute.manager [req-945810c7-8e40-462e-abd7-ff5a884261dc req-0bc8c27a-e291-4d1e-88db-0f20e1a1f531 service nova] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Refreshing instance network info cache due to event network-changed-f47ebbd9-5144-437d-a6ba-4ea5ea47d21a.
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2121.821757] env[61663]: DEBUG oslo_concurrency.lockutils [req-945810c7-8e40-462e-abd7-ff5a884261dc req-0bc8c27a-e291-4d1e-88db-0f20e1a1f531 service nova] Acquiring lock "refresh_cache-54a78c20-cbf6-453b-88e4-2fb4da0a6200" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.821909] env[61663]: DEBUG oslo_concurrency.lockutils [req-945810c7-8e40-462e-abd7-ff5a884261dc req-0bc8c27a-e291-4d1e-88db-0f20e1a1f531 service nova] Acquired lock "refresh_cache-54a78c20-cbf6-453b-88e4-2fb4da0a6200" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.822168] env[61663]: DEBUG nova.network.neutron [req-945810c7-8e40-462e-abd7-ff5a884261dc req-0bc8c27a-e291-4d1e-88db-0f20e1a1f531 service nova] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Refreshing network info cache for port f47ebbd9-5144-437d-a6ba-4ea5ea47d21a {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2122.120511] env[61663]: DEBUG nova.network.neutron [req-945810c7-8e40-462e-abd7-ff5a884261dc req-0bc8c27a-e291-4d1e-88db-0f20e1a1f531 service nova] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Updated VIF entry in instance network info cache for port f47ebbd9-5144-437d-a6ba-4ea5ea47d21a. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2122.120878] env[61663]: DEBUG nova.network.neutron [req-945810c7-8e40-462e-abd7-ff5a884261dc req-0bc8c27a-e291-4d1e-88db-0f20e1a1f531 service nova] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Updating instance_info_cache with network_info: [{"id": "f47ebbd9-5144-437d-a6ba-4ea5ea47d21a", "address": "fa:16:3e:0b:10:42", "network": {"id": "0bf1355e-3265-4cdb-9b3e-f1e1876f43b6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1573123323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab3cef71bf2e45d6b45c5a77b2b13670", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf47ebbd9-51", "ovs_interfaceid": "f47ebbd9-5144-437d-a6ba-4ea5ea47d21a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2122.130412] env[61663]: DEBUG oslo_concurrency.lockutils [req-945810c7-8e40-462e-abd7-ff5a884261dc req-0bc8c27a-e291-4d1e-88db-0f20e1a1f531 service nova] Releasing lock "refresh_cache-54a78c20-cbf6-453b-88e4-2fb4da0a6200" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2128.954663] env[61663]: DEBUG oslo_concurrency.lockutils [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 
tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.284927] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "202e0f58-b057-4e57-8a92-c06d6efda570" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.285245] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "202e0f58-b057-4e57-8a92-c06d6efda570" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.693365] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2136.692347] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2140.687640] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.691851] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.692876] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.693264] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2143.693264] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2143.713619] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.713774] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.713899] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.714156] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.714355] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.714483] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.714606] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.714728] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.714847] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.714971] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2143.715108] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2143.715574] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.693061] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.693449] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2144.704302] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.704517] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.704687] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2144.704844] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2144.705967] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7c75cb-6d61-4f74-8d32-09e91b48362a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.714860] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de19f088-c89b-4637-ae9b-a0634e85bf41 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.728550] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07952ec9-8f45-4550-9804-530d6da78a4c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.734425] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8538cfe5-b685-498f-8b43-a964224aff87 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.762496] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181310MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2144.762624] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.762806] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.836590] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 94f7665c-5247-4474-a9ea-700f1778af81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.836963] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.836963] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.837084] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.837156] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 440175fc-da0c-4ea3-9a74-46e97e32658b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.837279] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.837402] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.837517] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.837628] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.837740] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.851434] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2144.862163] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 12ecd7c0-dcb3-42f6-8560-c239f786254c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2144.871833] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2144.881148] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b58c2b47-6508-491c-ad2c-ac86e654c7ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2144.890243] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2144.900914] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2144.900914] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2144.900914] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2145.092812] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f416e51c-aa1d-47de-abdf-937069fc654d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.100118] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c78f57f-ef9e-49fd-92ca-3d029138bd43 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.128953] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71290f3-f423-454f-bb2e-744fb40d46c5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.135814] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0067c7ad-f0c7-4a5f-bc9a-5a27ec9dc8c9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.148272] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2145.156313] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2145.170414] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2145.170593] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.408s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2149.171466] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2149.171466] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 2155.688603] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2166.675077] env[61663]: WARNING oslo_vmware.rw_handles [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles response.begin()
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2166.675077] env[61663]: ERROR oslo_vmware.rw_handles
[ 2166.675788] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/8f5178c5-29ad-4a6f-b021-ea36f2aba118/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2166.677447] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2166.677691] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Copying Virtual Disk [datastore1] vmware_temp/8f5178c5-29ad-4a6f-b021-ea36f2aba118/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/8f5178c5-29ad-4a6f-b021-ea36f2aba118/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2166.677972] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e36fca14-fbab-413a-bf38-3b4054be2c4e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2166.687415] env[61663]: DEBUG oslo_vmware.api [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Waiting for the task: (returnval){
[ 2166.687415] env[61663]: value = "task-1690832"
[ 2166.687415] env[61663]: _type = "Task"
[ 2166.687415] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2166.695522] env[61663]: DEBUG oslo_vmware.api [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Task: {'id': task-1690832, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2167.198107] env[61663]: DEBUG oslo_vmware.exceptions [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2167.198391] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2167.198935] env[61663]: ERROR nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2167.198935] env[61663]: Faults: ['InvalidArgument']
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Traceback (most recent call last):
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] yield resources
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] self.driver.spawn(context, instance, image_meta,
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] self._fetch_image_if_missing(context, vi)
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] image_cache(vi, tmp_image_ds_loc)
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] vm_util.copy_virtual_disk(
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] session._wait_for_task(vmdk_copy_task)
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] return self.wait_for_task(task_ref)
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] return evt.wait()
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] result = hub.switch()
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] return self.greenlet.switch()
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] self.f(*self.args, **self.kw)
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] raise exceptions.translate_fault(task_info.error)
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Faults: ['InvalidArgument']
[ 2167.198935] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81]
[ 2167.199957] env[61663]: INFO nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Terminating instance
[ 2167.200839] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2167.201064] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2167.201308] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-476baa56-6d05-4230-b960-ad312fac60e3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2167.203813] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2167.204010] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2167.204739] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a5ed02a-a56a-43be-965a-c42cea229eb3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2167.212544] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2167.212797] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-471bc2f1-52fe-484e-a397-4928456eebc8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2167.215102] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2167.215281] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2167.216241] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a47640de-dee7-46ae-9ec6-275cdacf0bab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2167.221720] env[61663]: DEBUG oslo_vmware.api [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Waiting for the task: (returnval){
[ 2167.221720] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e98412-6cce-1687-9e75-a1e5bda1b469"
[ 2167.221720] env[61663]: _type = "Task"
[ 2167.221720] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2167.228988] env[61663]: DEBUG oslo_vmware.api [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e98412-6cce-1687-9e75-a1e5bda1b469, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2167.288792] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2167.289134] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2167.289393] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Deleting the datastore file [datastore1] 94f7665c-5247-4474-a9ea-700f1778af81 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2167.289743] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-681955e2-e27a-46e1-837d-45701413fc56 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2167.296188] env[61663]: DEBUG oslo_vmware.api [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Waiting for the task: (returnval){
[ 2167.296188] env[61663]: value = "task-1690834"
[ 2167.296188] env[61663]: _type = "Task"
[ 2167.296188] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2167.303596] env[61663]: DEBUG oslo_vmware.api [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Task: {'id': task-1690834, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.732799] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2167.733077] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Creating directory with path [datastore1] vmware_temp/f15f31ad-6742-41df-9e13-9f8154142111/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2167.733347] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4db8126-1ccd-4cc4-9830-0256869ade83 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.744553] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Created directory with path [datastore1] vmware_temp/f15f31ad-6742-41df-9e13-9f8154142111/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2167.744801] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Fetch image to [datastore1] vmware_temp/f15f31ad-6742-41df-9e13-9f8154142111/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2167.745034] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/f15f31ad-6742-41df-9e13-9f8154142111/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2167.745789] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a25e41-b82a-489c-8072-0c4a3bb34e76 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.752275] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efdaacd-8611-469a-bede-a24b50a8bd79 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.761150] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d0b00d-044c-400c-87f2-b672398a5d0a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.791523] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-72a6fa29-0f16-4f7c-81a4-8ba21378e142 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.799083] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fd585d14-4d56-46e0-8c65-6e7c9f243516 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.805346] env[61663]: DEBUG oslo_vmware.api [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Task: {'id': task-1690834, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078587} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.805589] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2167.805820] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2167.806048] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2167.806242] env[61663]: INFO nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Took 0.60 seconds to destroy the instance on the hypervisor. 
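The DeleteDatastoreFile_Task entries above show oslo_vmware's task pattern end to end: vCenter returns a task reference, wait_for_task() polls it from a looping call, and an error state is translated and raised, the same path that produced the VimFaultException fileType fault in the traceback earlier. Below is a minimal self-contained sketch of that polling loop; get_task_info() is a hypothetical stand-in for the real property read, and the class is a stand-in for oslo_vmware.exceptions.VimFaultException, so this is an illustration of the pattern, not the oslo_vmware implementation.

    import time

    class VimFaultException(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException (assumption)
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        # get_task_info() is assumed to return a dict such as
        # {'state': 'running'|'success'|'error', 'error': {...}}
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                err = info.get('error', {})
                # mirrors raise exceptions.translate_fault(task_info.error)
                raise VimFaultException(err.get('faults', []),
                                        err.get('localizedMessage', 'unknown'))
            # queued/running: poll again, as in the "progress is 0%" entries
            time.sleep(interval)
        raise TimeoutError('task did not complete in %.0fs' % timeout)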
[ 2167.808398] env[61663]: DEBUG nova.compute.claims [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2167.808571] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2167.808787] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2167.818901] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2167.869708] env[61663]: DEBUG oslo_vmware.rw_handles [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f15f31ad-6742-41df-9e13-9f8154142111/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2167.932760] env[61663]: DEBUG oslo_vmware.rw_handles [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2167.932957] env[61663]: DEBUG oslo_vmware.rw_handles [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f15f31ad-6742-41df-9e13-9f8154142111/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2168.089236] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66b7745-2600-46d1-9b57-4d21a8498011 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.097223] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1072efa-3c57-49ae-862d-e56c3a40f606 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.126163] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d495a2a3-9219-4e2c-b5ea-04602edbe728 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.133161] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96923da0-4c34-4de1-8ae8-68132095fd86 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.145838] env[61663]: DEBUG nova.compute.provider_tree [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.153973] env[61663]: DEBUG nova.scheduler.client.report [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2168.167706] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.358s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.167706] env[61663]: ERROR nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.167706] env[61663]: Faults: ['InvalidArgument'] [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Traceback (most recent call last): [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2168.167706] env[61663]: ERROR nova.compute.manager 
[instance: 94f7665c-5247-4474-a9ea-700f1778af81] self.driver.spawn(context, instance, image_meta, [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] self._fetch_image_if_missing(context, vi) [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] image_cache(vi, tmp_image_ds_loc) [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] vm_util.copy_virtual_disk( [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] session._wait_for_task(vmdk_copy_task) [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] return self.wait_for_task(task_ref) [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] return evt.wait() [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] result = hub.switch() [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] return self.greenlet.switch() [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] self.f(*self.args, **self.kw) [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] raise exceptions.translate_fault(task_info.error) [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Faults: ['InvalidArgument'] [ 2168.167706] env[61663]: ERROR nova.compute.manager [instance: 94f7665c-5247-4474-a9ea-700f1778af81] [ 2168.168565] env[61663]: DEBUG nova.compute.utils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2168.171030] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Build of instance 94f7665c-5247-4474-a9ea-700f1778af81 was re-scheduled: A specified parameter was not correct: fileType [ 2168.171030] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2168.172027] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2168.172027] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2168.172027] env[61663]: DEBUG nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2168.172027] env[61663]: DEBUG nova.network.neutron [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2168.859197] env[61663]: DEBUG nova.network.neutron [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2168.872040] env[61663]: INFO nova.compute.manager [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Took 0.70 seconds to deallocate network for instance. [ 2168.962285] env[61663]: INFO nova.scheduler.client.report [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Deleted allocations for instance 94f7665c-5247-4474-a9ea-700f1778af81 [ 2168.985740] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4db98392-c038-404e-befe-f05775a53e45 tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "94f7665c-5247-4474-a9ea-700f1778af81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 635.647s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.986511] env[61663]: DEBUG oslo_concurrency.lockutils [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "94f7665c-5247-4474-a9ea-700f1778af81" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 438.954s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.986932] env[61663]: DEBUG oslo_concurrency.lockutils [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Acquiring lock "94f7665c-5247-4474-a9ea-700f1778af81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2168.986932] env[61663]: DEBUG oslo_concurrency.lockutils [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "94f7665c-5247-4474-a9ea-700f1778af81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61663) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2168.987086] env[61663]: DEBUG oslo_concurrency.lockutils [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "94f7665c-5247-4474-a9ea-700f1778af81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.989215] env[61663]: INFO nova.compute.manager [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Terminating instance [ 2168.990767] env[61663]: DEBUG nova.compute.manager [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2168.990971] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2168.991419] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-859a025c-6e9a-451b-bf4b-5ee3d8e89189 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.001667] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988f17b2-0fc5-422f-b3f4-4e9f87a18adc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.019782] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2169.031219] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 94f7665c-5247-4474-a9ea-700f1778af81 could not be found. [ 2169.031433] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2169.031615] env[61663]: INFO nova.compute.manager [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Took 0.04 seconds to destroy the instance on the hypervisor.
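The WARNING above is the expected path when a second terminate runs after the failed build has already cleaned up: the driver raises nova.exception.InstanceNotFound and the destroy continues as a success, which keeps terminate idempotent. A minimal sketch of that pattern follows; backend.unregister_and_delete() is a hypothetical helper standing in for the real unregister/file-delete sequence in vmops, so this shows the error-handling shape only.

    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        # Stand-in for nova.exception.InstanceNotFound (assumption)
        pass

    def destroy_instance(backend, instance_uuid):
        # Destroy a VM, treating "already gone" as success so that a
        # second terminate of the same instance cannot fail.
        try:
            backend.unregister_and_delete(instance_uuid)
        except InstanceNotFound:
            # Matches the "Instance does not exist on backend" warning above
            LOG.warning('Instance does not exist on backend: %s', instance_uuid)
        LOG.debug('Instance destroyed')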
[ 2169.031859] env[61663]: DEBUG oslo.service.loopingcall [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2169.032293] env[61663]: DEBUG nova.compute.manager [-] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2169.032390] env[61663]: DEBUG nova.network.neutron [-] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2169.058474] env[61663]: DEBUG nova.network.neutron [-] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2169.065358] env[61663]: INFO nova.compute.manager [-] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] Took 0.03 seconds to deallocate network for instance. [ 2169.068572] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.068832] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.070301] env[61663]: INFO nova.compute.claims [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2169.180019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-525652ea-eb7d-4efc-a586-861d2ae9f19b tempest-ServerGroupTestJSON-1795276624 tempest-ServerGroupTestJSON-1795276624-project-member] Lock "94f7665c-5247-4474-a9ea-700f1778af81" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.189s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.180019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "94f7665c-5247-4474-a9ea-700f1778af81" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 380.089s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2169.180019] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 94f7665c-5247-4474-a9ea-700f1778af81] During sync_power_state the instance has a pending task (deleting). Skip.
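The "Inventory has not changed" entries on either side of this point repeat the provider's full inventory, from which placement derives usable capacity per resource class as (total - reserved) * allocation_ratio; for provider b47d006d-a9bd-461e-a5d9-39811f005278 that works out to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A short sketch of the arithmetic, using the values exactly as reported (the formula is the standard placement capacity calculation; the code itself is illustrative):

    # Inventory as reported in the scheduler report entries for this provider
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Schedulable capacity per class: (total - reserved) * allocation_ratio
        return {rc: int((f['total'] - f['reserved']) * f['allocation_ratio'])
                for rc, f in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}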
[ 2169.180019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "94f7665c-5247-4474-a9ea-700f1778af81" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.330744] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed0a864-4561-4312-a89e-6e0016ff1ea9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.338171] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3218b1e5-b8d4-4b1d-8e07-f4807ad79f29 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.369918] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b834d81c-2a9e-4cad-9892-474cd2106f00 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.377408] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85922b2c-33ff-4b2e-bca5-bae6bce5c3e1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.391451] env[61663]: DEBUG nova.compute.provider_tree [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2169.400052] env[61663]: DEBUG nova.scheduler.client.report [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2169.414662] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.346s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.415135] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Start building networks asynchronously for instance.
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2169.449009] env[61663]: DEBUG nova.compute.utils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2169.450424] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Not allocating networking since 'none' was specified. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 2169.458683] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2169.516149] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2169.541801] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=<?>,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:56:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2169.542063] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2169.542227] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2169.542413] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2169.542633] env[61663]: DEBUG nova.virt.hardware [None
req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2169.542793] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2169.542999] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2169.543209] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2169.543382] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2169.543580] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2169.543785] env[61663]: DEBUG nova.virt.hardware [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2169.544650] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7b5a01-a368-41b1-92e9-8f4e8e570778 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.553070] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30010c8-cc03-4111-9455-93a710286012 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.566480] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Instance VIF info [] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2169.571848] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Creating folder: Project (d9f2308fe1c642d586719d97225252c6). Parent ref: group-v352575. 
{{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2169.572118] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d4494d7-81a2-4f25-ad13-cfda76acbf5d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.582310] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Created folder: Project (d9f2308fe1c642d586719d97225252c6) in parent group-v352575. [ 2169.582501] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Creating folder: Instances. Parent ref: group-v352667. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2169.582712] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03cdd4fd-2162-40c1-8b37-c8b142d00270 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.591629] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Created folder: Instances in parent group-v352667. [ 2169.591847] env[61663]: DEBUG oslo.service.loopingcall [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2169.592033] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2169.592219] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e433c294-4c02-48e6-a455-c2e6e6e7ddc2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.607561] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2169.607561] env[61663]: value = "task-1690837" [ 2169.607561] env[61663]: _type = "Task" [ 2169.607561] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.614197] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690837, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.118232] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690837, 'name': CreateVM_Task, 'duration_secs': 0.247955} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2170.118569] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2170.118780] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2170.118943] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.119288] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2170.119534] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28948abb-b02b-4359-9d62-2b53cc01ed98 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.123586] env[61663]: DEBUG oslo_vmware.api [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Waiting for the task: (returnval){ [ 2170.123586] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5242b168-fcfd-88a4-916f-c33898131acf" [ 2170.123586] env[61663]: _type = "Task" [ 2170.123586] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2170.130426] env[61663]: DEBUG oslo_vmware.api [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5242b168-fcfd-88a4-916f-c33898131acf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.633833] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2170.634113] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2170.634335] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2172.070221] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.837686] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquiring lock "67f8162d-a631-4f0e-b03c-fd76ee131615" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.837948] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "67f8162d-a631-4f0e-b03c-fd76ee131615" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.155865] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aa308cc7-b9fc-4d6e-b0e7-a13bf9b398c1 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquiring lock "cbd92feb-f2a1-41cf-8552-8cd0b0b20f0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.156108] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aa308cc7-b9fc-4d6e-b0e7-a13bf9b398c1 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "cbd92feb-f2a1-41cf-8552-8cd0b0b20f0a" acquired by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2195.692389] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2196.692845] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2201.689181] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.691580] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.691812] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2203.691920] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2203.715858] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.716041] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.716184] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.716314] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.716437] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.716633] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.716767] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.716891] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.717023] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.717138] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2203.717261] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2204.691596] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2204.691843] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2204.692038] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2204.692219] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2204.704652] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2204.704843] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2204.705020] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2204.705190] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2204.706386] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17c115d-dce4-431f-b764-132bb79c19bd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.715463] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f352d70b-ba09-44ed-8da9-78a740599ce0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.730379] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb677ec-f302-4032-a7e3-fc87ab512f18 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.736709] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3af3910-46f9-428a-8138-bb5e650e63e3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.764908] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181331MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2204.765082] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2204.765289] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2204.840213] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.840387] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.840649] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.840649] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 440175fc-da0c-4ea3-9a74-46e97e32658b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.840752] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.840871] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.840988] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.841119] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.841233] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.841343] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2204.851894] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2204.864131] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b58c2b47-6508-491c-ad2c-ac86e654c7ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2204.873441] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2204.883369] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2204.893335] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2204.902626] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance cbd92feb-f2a1-41cf-8552-8cd0b0b20f0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2204.902905] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2204.903203] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2205.086669] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f81bee2-52b1-4bdb-8d48-2a940f11e1f2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.093882] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39dc5c3-5f68-4181-9aba-c65d56807dc2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.124704] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9745ac8d-e347-495f-9bcb-c860546153a6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.131511] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246d5308-eff1-4705-86ac-316af5f695c8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.144014] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: 
b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2205.152958] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2205.165820] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2205.165998] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.401s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2209.166456] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2209.166456] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2214.620164] env[61663]: WARNING oslo_vmware.rw_handles [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2214.620164] env[61663]: ERROR oslo_vmware.rw_handles [ 2214.620752] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/f15f31ad-6742-41df-9e13-9f8154142111/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2214.622706] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2214.622979] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Copying Virtual Disk [datastore1] vmware_temp/f15f31ad-6742-41df-9e13-9f8154142111/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/f15f31ad-6742-41df-9e13-9f8154142111/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2214.623278] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91684aaf-9a70-4640-b808-38703dabbc05 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.631690] env[61663]: DEBUG oslo_vmware.api [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Waiting 
for the task: (returnval){ [ 2214.631690] env[61663]: value = "task-1690838" [ 2214.631690] env[61663]: _type = "Task" [ 2214.631690] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2214.639267] env[61663]: DEBUG oslo_vmware.api [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Task: {'id': task-1690838, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.141992] env[61663]: DEBUG oslo_vmware.exceptions [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2215.142309] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2215.142907] env[61663]: ERROR nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2215.142907] env[61663]: Faults: ['InvalidArgument'] [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Traceback (most recent call last): [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] yield resources [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] self.driver.spawn(context, instance, image_meta, [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] self._fetch_image_if_missing(context, vi) [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2215.142907] env[61663]: ERROR nova.compute.manager 
[instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] image_cache(vi, tmp_image_ds_loc) [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] vm_util.copy_virtual_disk( [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] session._wait_for_task(vmdk_copy_task) [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] return self.wait_for_task(task_ref) [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] return evt.wait() [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] result = hub.switch() [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] return self.greenlet.switch() [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] self.f(*self.args, **self.kw) [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] raise exceptions.translate_fault(task_info.error) [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Faults: ['InvalidArgument'] [ 2215.142907] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] [ 2215.143747] env[61663]: INFO nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Terminating instance [ 2215.144865] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db 
tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2215.145097] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2215.145342] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4d0bccd-dfd1-4f99-9cd0-6797c019163c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.147713] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2215.147908] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2215.148637] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa51749-0f0a-4d33-a289-a28f85242e43 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.155177] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2215.155387] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-08d79459-3844-40a2-b4a3-ca5a0048098c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.157537] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2215.157762] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2215.158666] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4595443-69da-43cf-b995-145f243d9e4e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.164332] env[61663]: DEBUG oslo_vmware.api [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 2215.164332] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529dfe2e-e39d-e3c1-7dbe-6348dfb15841" [ 2215.164332] env[61663]: _type = "Task" [ 2215.164332] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.175360] env[61663]: DEBUG oslo_vmware.api [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529dfe2e-e39d-e3c1-7dbe-6348dfb15841, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.231622] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2215.231622] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2215.231622] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Deleting the datastore file [datastore1] 768bef02-a114-4cac-a614-6e8a04ce0d18 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2215.231622] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fef6599-caa6-4b7c-99af-8f2d1d57ece5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.238359] env[61663]: DEBUG oslo_vmware.api [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Waiting for the task: (returnval){ [ 2215.238359] env[61663]: value = "task-1690840" [ 2215.238359] env[61663]: _type = "Task" [ 2215.238359] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.246112] env[61663]: DEBUG oslo_vmware.api [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Task: {'id': task-1690840, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.675129] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2215.675497] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating directory with path [datastore1] vmware_temp/8a38b57f-d1b7-4637-ba9d-cb8c1e9baa79/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2215.675628] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-016b7853-56dd-4e93-a56d-b988fd345062 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.686582] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created directory with path [datastore1] vmware_temp/8a38b57f-d1b7-4637-ba9d-cb8c1e9baa79/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2215.686767] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Fetch image to [datastore1] vmware_temp/8a38b57f-d1b7-4637-ba9d-cb8c1e9baa79/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2215.686950] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/8a38b57f-d1b7-4637-ba9d-cb8c1e9baa79/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2215.687681] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ca7f8a-fef0-47b0-bc49-0751377160d9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.694053] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc421ba-0a12-46ad-9268-dc107c73b266 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.702513] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69888a07-d2b6-4fd3-8ebd-f6a02a960f4c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.731787] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-627c975a-36a6-461d-8f6b-046f66277a4c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.736918] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cff94c48-8d38-490a-a1ed-7653b6f92544 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.745391] env[61663]: DEBUG oslo_vmware.api [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Task: {'id': task-1690840, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078311} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2215.745610] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2215.745788] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2215.745958] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2215.746152] env[61663]: INFO nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Took 0.60 seconds to destroy the instance on the hypervisor. 
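Annotation: the FileManager.DeleteDatastoreFile_Task sequence above (invoke at service.py:371, "Waiting for the task" at api.py:397, progress polling at api.py:434, completion at api.py:444) is oslo.vmware's standard invoke-then-poll task pattern. A minimal sketch of that pattern follows; the vCenter host, credentials, and datastore path are placeholders, not values from this log.

# Minimal sketch of oslo.vmware's invoke-then-poll task pattern, as seen
# in the DeleteDatastoreFile_Task records above. Needs a reachable
# vCenter; host, credentials, and the datastore path are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test',   # vCenter host (placeholder)
    'user',              # username (placeholder)
    'password',          # password (placeholder)
    10,                  # api_retry_count
    0.5)                 # task_poll_interval, in seconds

# Invoking the API method returns a Task managed-object reference.
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task',
    session.vim.service_content.fileManager,
    name='[datastore1] some-dir/some-file.vmdk',  # placeholder path
    datacenter=None)

# wait_for_task() drives the "Task: {'id': ..., 'name': ...} progress
# is 0%." polling seen above, and raises a translated fault if the task
# errors out (the VimFaultException in this log surfaces from exactly
# this call path).
session.wait_for_task(task)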
[ 2215.748195] env[61663]: DEBUG nova.compute.claims [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2215.748436] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2215.748665] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.760080] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2215.809966] env[61663]: DEBUG oslo_vmware.rw_handles [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a38b57f-d1b7-4637-ba9d-cb8c1e9baa79/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2215.870476] env[61663]: DEBUG oslo_vmware.rw_handles [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2215.870664] env[61663]: DEBUG oslo_vmware.rw_handles [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a38b57f-d1b7-4637-ba9d-cb8c1e9baa79/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2216.012242] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e46eaa-bea9-4ca8-9229-8083d2037f2e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.019679] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae997ce-1cea-4db4-8d14-51ce973afa14 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.049448] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be34d74-dfee-4547-a880-550048e2f9dc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.056105] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99c792b-dde7-43eb-87a4-f954f1591ad3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.068425] env[61663]: DEBUG nova.compute.provider_tree [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2216.077203] env[61663]: DEBUG nova.scheduler.client.report [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2216.089933] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.341s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.090481] env[61663]: ERROR nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2216.090481] env[61663]: Faults: ['InvalidArgument'] [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Traceback (most recent call last): [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2216.090481] 
env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] self.driver.spawn(context, instance, image_meta, [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] self._fetch_image_if_missing(context, vi) [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] image_cache(vi, tmp_image_ds_loc) [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] vm_util.copy_virtual_disk( [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] session._wait_for_task(vmdk_copy_task) [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] return self.wait_for_task(task_ref) [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] return evt.wait() [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] result = hub.switch() [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] return self.greenlet.switch() [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] self.f(*self.args, **self.kw) [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] raise exceptions.translate_fault(task_info.error) [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Faults: ['InvalidArgument'] [ 2216.090481] env[61663]: ERROR nova.compute.manager [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] [ 2216.091347] env[61663]: DEBUG nova.compute.utils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2216.092646] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Build of instance 768bef02-a114-4cac-a614-6e8a04ce0d18 was re-scheduled: A specified parameter was not correct: fileType [ 2216.092646] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2216.093044] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2216.093240] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2216.093416] env[61663]: DEBUG nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2216.093596] env[61663]: DEBUG nova.network.neutron [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2216.698062] env[61663]: DEBUG nova.network.neutron [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.715081] env[61663]: INFO nova.compute.manager [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Took 0.62 seconds to deallocate network for instance. [ 2216.813305] env[61663]: INFO nova.scheduler.client.report [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Deleted allocations for instance 768bef02-a114-4cac-a614-6e8a04ce0d18 [ 2216.833660] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c90f692b-f9e9-4acf-8fc8-7c9f09f2d5c9 tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 635.693s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.834714] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 440.599s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2216.834933] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Acquiring lock "768bef02-a114-4cac-a614-6e8a04ce0d18-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2216.835155] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2216.835327] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.837887] env[61663]: INFO nova.compute.manager [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Terminating instance [ 2216.839552] env[61663]: DEBUG nova.compute.manager [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2216.839735] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2216.840223] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c095ff4f-a50b-4695-bc8b-3ea7df32c852 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.849768] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140e4fcc-a7a6-4cbd-bf63-b816f37a8a2e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.860386] env[61663]: DEBUG nova.compute.manager [None req-997aace2-bef5-43a1-9f12-ce256dc1e079 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 12ecd7c0-dcb3-42f6-8560-c239f786254c] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2216.880911] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 768bef02-a114-4cac-a614-6e8a04ce0d18 could not be found.
[ 2216.881132] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2216.881314] env[61663]: INFO nova.compute.manager [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2216.881550] env[61663]: DEBUG oslo.service.loopingcall [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2216.881791] env[61663]: DEBUG nova.compute.manager [-] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2216.881887] env[61663]: DEBUG nova.network.neutron [-] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2216.884149] env[61663]: DEBUG nova.compute.manager [None req-997aace2-bef5-43a1-9f12-ce256dc1e079 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 12ecd7c0-dcb3-42f6-8560-c239f786254c] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 2216.903940] env[61663]: DEBUG oslo_concurrency.lockutils [None req-997aace2-bef5-43a1-9f12-ce256dc1e079 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "12ecd7c0-dcb3-42f6-8560-c239f786254c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 241.610s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2216.906665] env[61663]: DEBUG nova.network.neutron [-] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.914151] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2216.916803] env[61663]: INFO nova.compute.manager [-] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] Took 0.03 seconds to deallocate network for instance.
[ 2216.958619] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2216.958855] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2216.960296] env[61663]: INFO nova.compute.claims [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2217.006231] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b2f07c63-9eec-4787-a827-3a1e067e04dd tempest-ServerActionsTestOtherB-1550190657 tempest-ServerActionsTestOtherB-1550190657-project-member] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.171s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2217.007230] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 427.920s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.007467] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 768bef02-a114-4cac-a614-6e8a04ce0d18] During sync_power_state the instance has a pending task (deleting). Skip.
[ 2217.007586] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "768bef02-a114-4cac-a614-6e8a04ce0d18" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2217.175391] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d188341-1426-49c3-8302-43c678d450a7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.183103] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432e9f96-d6a5-4678-819f-411813a48635 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.212382] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f96f430-86fb-4ab1-bce1-c3cfaffc3cc2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.219482] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f77f00cf-538a-47a0-9d4d-3e1231778838 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.233268] env[61663]: DEBUG nova.compute.provider_tree [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2217.243586] env[61663]: DEBUG nova.scheduler.client.report [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2217.256412] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.297s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2217.256921] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Start building networks asynchronously for instance.
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2217.287671] env[61663]: DEBUG nova.compute.utils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2217.289034] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2217.289134] env[61663]: DEBUG nova.network.neutron [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2217.300881] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2217.355035] env[61663]: DEBUG nova.policy [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '033e5ebd18fb421b8ad3f4ad5033f1b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7bb1bdc9b1004ff591ab4e001d81b400', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2217.362116] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2217.388024] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2217.388024] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2217.388210] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2217.388364] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2217.388517] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2217.388668] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2217.388878] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2217.389057] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2217.389235] 
env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2217.389397] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2217.389569] env[61663]: DEBUG nova.virt.hardware [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2217.390420] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853f01fe-d05d-413a-95b8-7323bd0dbf67 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.397937] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdb6430-90b5-42e9-bf23-5afda08a8483 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.933947] env[61663]: DEBUG nova.network.neutron [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Successfully created port: 9a44847b-5e60-4cca-b70a-7d3e53c554cd {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2218.536886] env[61663]: DEBUG nova.network.neutron [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Successfully updated port: 9a44847b-5e60-4cca-b70a-7d3e53c554cd {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2218.552771] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "refresh_cache-e47c9821-f815-4bd5-bf00-8822f08e3333" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.553187] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "refresh_cache-e47c9821-f815-4bd5-bf00-8822f08e3333" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.553495] env[61663]: DEBUG nova.network.neutron [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2218.643372] env[61663]: DEBUG 
nova.network.neutron [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2218.945917] env[61663]: DEBUG nova.network.neutron [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Updating instance_info_cache with network_info: [{"id": "9a44847b-5e60-4cca-b70a-7d3e53c554cd", "address": "fa:16:3e:8f:be:59", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a44847b-5e", "ovs_interfaceid": "9a44847b-5e60-4cca-b70a-7d3e53c554cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.961081] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "refresh_cache-e47c9821-f815-4bd5-bf00-8822f08e3333" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2218.961389] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Instance network_info: |[{"id": "9a44847b-5e60-4cca-b70a-7d3e53c554cd", "address": "fa:16:3e:8f:be:59", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a44847b-5e", 
"ovs_interfaceid": "9a44847b-5e60-4cca-b70a-7d3e53c554cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2218.961853] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:be:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a44847b-5e60-4cca-b70a-7d3e53c554cd', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2218.969537] env[61663]: DEBUG oslo.service.loopingcall [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2218.970132] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2218.970397] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-537554b5-6417-4b93-9d5f-3676bf859f5f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.992284] env[61663]: DEBUG nova.compute.manager [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Received event network-vif-plugged-9a44847b-5e60-4cca-b70a-7d3e53c554cd {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2218.992493] env[61663]: DEBUG oslo_concurrency.lockutils [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] Acquiring lock "e47c9821-f815-4bd5-bf00-8822f08e3333-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2218.993183] env[61663]: DEBUG oslo_concurrency.lockutils [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.993400] env[61663]: DEBUG oslo_concurrency.lockutils [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2218.993578] env[61663]: DEBUG nova.compute.manager [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] 
[instance: e47c9821-f815-4bd5-bf00-8822f08e3333] No waiting events found dispatching network-vif-plugged-9a44847b-5e60-4cca-b70a-7d3e53c554cd {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2218.993748] env[61663]: WARNING nova.compute.manager [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Received unexpected event network-vif-plugged-9a44847b-5e60-4cca-b70a-7d3e53c554cd for instance with vm_state building and task_state spawning. [ 2218.993926] env[61663]: DEBUG nova.compute.manager [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Received event network-changed-9a44847b-5e60-4cca-b70a-7d3e53c554cd {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2218.994093] env[61663]: DEBUG nova.compute.manager [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Refreshing instance network info cache due to event network-changed-9a44847b-5e60-4cca-b70a-7d3e53c554cd. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2218.994283] env[61663]: DEBUG oslo_concurrency.lockutils [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] Acquiring lock "refresh_cache-e47c9821-f815-4bd5-bf00-8822f08e3333" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.994423] env[61663]: DEBUG oslo_concurrency.lockutils [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] Acquired lock "refresh_cache-e47c9821-f815-4bd5-bf00-8822f08e3333" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.994580] env[61663]: DEBUG nova.network.neutron [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Refreshing network info cache for port 9a44847b-5e60-4cca-b70a-7d3e53c554cd {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2218.999276] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2218.999276] env[61663]: value = "task-1690841" [ 2218.999276] env[61663]: _type = "Task" [ 2218.999276] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.008563] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690841, 'name': CreateVM_Task} progress is 6%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.507915] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690841, 'name': CreateVM_Task, 'duration_secs': 0.317979} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.508106] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2219.508715] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2219.508882] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2219.509251] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2219.509506] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b80239a-271a-4c0f-a4d6-234fc6b101e6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.513938] env[61663]: DEBUG oslo_vmware.api [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 2219.513938] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bdaca7-9c89-d3d8-0b84-e8f183443fc8" [ 2219.513938] env[61663]: _type = "Task" [ 2219.513938] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.522794] env[61663]: DEBUG oslo_vmware.api [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bdaca7-9c89-d3d8-0b84-e8f183443fc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.613566] env[61663]: DEBUG nova.network.neutron [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Updated VIF entry in instance network info cache for port 9a44847b-5e60-4cca-b70a-7d3e53c554cd. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2219.613940] env[61663]: DEBUG nova.network.neutron [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Updating instance_info_cache with network_info: [{"id": "9a44847b-5e60-4cca-b70a-7d3e53c554cd", "address": "fa:16:3e:8f:be:59", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a44847b-5e", "ovs_interfaceid": "9a44847b-5e60-4cca-b70a-7d3e53c554cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2219.624503] env[61663]: DEBUG oslo_concurrency.lockutils [req-7482837a-1b10-4c80-84df-7183c0ce04e3 req-f676e491-6a6d-4b4b-abd5-dd2888e481d5 service nova] Releasing lock "refresh_cache-e47c9821-f815-4bd5-bf00-8822f08e3333" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.024662] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2220.025306] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2220.025536] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2239.715582] env[61663]: DEBUG oslo_concurrency.lockutils [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "e47c9821-f815-4bd5-bf00-8822f08e3333" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance"
{{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2257.692248] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2257.692588] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2261.595811] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.596119] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2263.687619] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2263.693059] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2263.693059] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2263.693059] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2263.714014] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.714173] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Skipping network cache update for instance because it is Building.
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.714293] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.714420] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.714543] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.714665] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.714784] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.714900] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.715026] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.715148] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2263.715266] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2264.634352] env[61663]: WARNING oslo_vmware.rw_handles [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2264.634352] env[61663]: ERROR oslo_vmware.rw_handles [ 2264.634853] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/8a38b57f-d1b7-4637-ba9d-cb8c1e9baa79/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2264.641093] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2264.641526] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Copying Virtual Disk [datastore1] vmware_temp/8a38b57f-d1b7-4637-ba9d-cb8c1e9baa79/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/8a38b57f-d1b7-4637-ba9d-cb8c1e9baa79/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2264.641855] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4222296-3e5c-4a66-be77-d8299b6d482e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.649901] env[61663]: DEBUG oslo_vmware.api [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 
tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 2264.649901] env[61663]: value = "task-1690842" [ 2264.649901] env[61663]: _type = "Task" [ 2264.649901] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2264.657860] env[61663]: DEBUG oslo_vmware.api [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690842, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2264.691519] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.691858] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2264.691858] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2265.159631] env[61663]: DEBUG oslo_vmware.exceptions [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2265.159922] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2265.160513] env[61663]: ERROR nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2265.160513] env[61663]: Faults: ['InvalidArgument'] [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Traceback (most recent call last): [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] yield resources [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] self.driver.spawn(context, instance, image_meta, [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] self._fetch_image_if_missing(context, vi) [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] image_cache(vi, tmp_image_ds_loc) [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] vm_util.copy_virtual_disk( [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] session._wait_for_task(vmdk_copy_task) [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] return self.wait_for_task(task_ref) [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] return evt.wait() [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] result = hub.switch() [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] return self.greenlet.switch() [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] self.f(*self.args, **self.kw) [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] raise exceptions.translate_fault(task_info.error) [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Faults: ['InvalidArgument'] [ 2265.160513] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] [ 2265.161557] env[61663]: INFO nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Terminating instance [ 2265.162485] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2265.162695] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2265.162931] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe83ce9a-6fe5-48be-af94-3dd4596c68e3 
{{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.166647] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2265.166853] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2265.167628] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a13891-59d7-45bb-a648-c389a44e2ab9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.174401] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2265.174618] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52ffd28b-2c02-4fe6-843d-043ab11d9dc0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.176916] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2265.177124] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2265.178049] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98db7add-3001-4c81-ba9c-0abe3010f154 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.182839] env[61663]: DEBUG oslo_vmware.api [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Waiting for the task: (returnval){ [ 2265.182839] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f244c-85b7-051e-b8b1-b4fffd129759" [ 2265.182839] env[61663]: _type = "Task" [ 2265.182839] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.189549] env[61663]: DEBUG oslo_vmware.api [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f244c-85b7-051e-b8b1-b4fffd129759, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.257912] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2265.258227] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2265.258433] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleting the datastore file [datastore1] 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2265.258694] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97ec8181-2402-47ac-ac34-58d9520d7603 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.265071] env[61663]: DEBUG oslo_vmware.api [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 2265.265071] env[61663]: value = "task-1690844" [ 2265.265071] env[61663]: _type = "Task" [ 2265.265071] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.273299] env[61663]: DEBUG oslo_vmware.api [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690844, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.693997] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2265.694317] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Creating directory with path [datastore1] vmware_temp/a485ae3c-ae60-42ee-b348-76d94739fd5a/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2265.694541] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b30c1ecb-3716-4cf7-8789-761637f3c8c0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.706408] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Created directory with path [datastore1] vmware_temp/a485ae3c-ae60-42ee-b348-76d94739fd5a/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2265.706570] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Fetch image to [datastore1] vmware_temp/a485ae3c-ae60-42ee-b348-76d94739fd5a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2265.706736] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/a485ae3c-ae60-42ee-b348-76d94739fd5a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2265.707511] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f555af9-3262-4b80-a282-98a417d42026 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.714532] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22c367b-e6a1-4e51-a6db-1d5002439f8c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.723990] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8242b949-2854-49de-828e-b80a0ddc23de {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.753926] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b5a9e61e-844d-4fbb-97e9-a0dae5792715 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.759496] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e0bdf088-a574-45dc-a91b-99f5a5ff0508 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.773247] env[61663]: DEBUG oslo_vmware.api [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690844, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070277} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.773487] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2265.773671] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2265.773852] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2265.774049] env[61663]: INFO nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Took 0.61 seconds to destroy the instance on the hypervisor. 
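
The destroy sequence above follows a fixed shape: invoke a vSphere method that returns a task (VirtualMachine.UnregisterVM, FileManager.DeleteDatastoreFile_Task), then block in wait_for_task, which polls the task object and emits the "progress is 0%" and "completed successfully ... duration_secs" records seen here. A minimal sketch of that polling loop, with get_task_info() as a hypothetical stand-in for the PropertyCollector round trip oslo.vmware actually performs on each poll:

import time

POLL_INTERVAL = 0.5  # seconds between polls; illustrative, not oslo.vmware's setting


class TaskFailed(Exception):
    """Raised when the hypervisor reports the task as errored."""


def wait_for_task(task_ref, get_task_info):
    """Poll a VMware-style task reference until it succeeds or fails.

    get_task_info() is a hypothetical callable standing in for the
    per-poll property retrieval the real library performs.
    """
    while True:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info.result            # the log records duration_secs at this point
        if info.state == "error":
            raise TaskFailed(info.error)  # surfaces faults such as 'InvalidArgument'
        # still queued/running: this is where the "progress is 0%" records come from
        print(f"Task {task_ref}: progress is {info.progress}%")
        time.sleep(POLL_INTERVAL)
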
[ 2265.776166] env[61663]: DEBUG nova.compute.claims [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2265.776328] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2265.776540] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2265.782662] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2265.846602] env[61663]: DEBUG oslo_vmware.rw_handles [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a485ae3c-ae60-42ee-b348-76d94739fd5a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2265.914762] env[61663]: DEBUG oslo_vmware.rw_handles [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2265.916049] env[61663]: DEBUG oslo_vmware.rw_handles [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a485ae3c-ae60-42ee-b348-76d94739fd5a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2266.064284] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d616050-fbe5-4c4b-87e8-7d18b37e846e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.071966] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d2e7d4-9e17-4d9f-ad8b-19d779051725 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.100770] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea635ff6-e54a-4c08-ae34-353a27e9b13c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.108015] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1de772a-8098-4ecc-92f1-6eb812d8f893 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.120837] env[61663]: DEBUG nova.compute.provider_tree [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2266.131184] env[61663]: DEBUG nova.scheduler.client.report [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2266.145743] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.369s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.146290] env[61663]: ERROR nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2266.146290] env[61663]: Faults: ['InvalidArgument'] [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Traceback (most recent call last): [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2266.146290] 
env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] self.driver.spawn(context, instance, image_meta, [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] self._fetch_image_if_missing(context, vi) [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] image_cache(vi, tmp_image_ds_loc) [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] vm_util.copy_virtual_disk( [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] session._wait_for_task(vmdk_copy_task) [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] return self.wait_for_task(task_ref) [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] return evt.wait() [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] result = hub.switch() [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] return self.greenlet.switch() [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] self.f(*self.args, **self.kw) [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] raise exceptions.translate_fault(task_info.error) [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Faults: ['InvalidArgument'] [ 2266.146290] env[61663]: ERROR nova.compute.manager [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] [ 2266.147154] env[61663]: DEBUG nova.compute.utils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2266.148488] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Build of instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c was re-scheduled: A specified parameter was not correct: fileType [ 2266.148488] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2266.148858] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2266.149030] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2266.149208] env[61663]: DEBUG nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2266.149373] env[61663]: DEBUG nova.network.neutron [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2266.564028] env[61663]: DEBUG nova.network.neutron [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2266.576320] env[61663]: INFO nova.compute.manager [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Took 0.43 seconds to deallocate network for instance. [ 2266.695138] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2266.707995] env[61663]: INFO nova.scheduler.client.report [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleted allocations for instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c [ 2266.716169] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2266.716576] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2266.716753] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.717075] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2266.718908] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18c17e9-dea3-4c0b-8ada-6498fca53d16 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.728875] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e611ddea-2a58-4a8e-ae9c-ee855f05a67a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.733862] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b1f6e605-90e2-434e-b2a9-ebcf60d020db tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 615.265s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.735380] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 418.872s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2266.735629] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2266.735884] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2266.736120] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.748589] env[61663]: INFO nova.compute.manager [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Terminating instance [ 2266.750632] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991f35fd-34f7-4f93-86f9-fe5bba0f16b0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.754446] env[61663]: DEBUG nova.compute.manager [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 
tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2266.754446] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2266.754794] env[61663]: DEBUG nova.compute.manager [None req-aca5ca74-d895-4661-8457-c008e4f214ac tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: b58c2b47-6508-491c-ad2c-ac86e654c7ae] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2266.757133] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9f0d8273-4aab-4ece-b83d-8eec3fd01587 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.767019] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beacc2df-feb7-4abf-9768-53d630ec60b5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.770728] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e0edbe-21d4-4c86-9246-d955a74e2936 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.782601] env[61663]: DEBUG nova.compute.manager [None req-aca5ca74-d895-4661-8457-c008e4f214ac tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: b58c2b47-6508-491c-ad2c-ac86e654c7ae] Instance disappeared before build. 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 2266.807699] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181318MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2266.807850] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2266.808062] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2266.822162] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c could not be found. [ 2266.822676] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2266.822676] env[61663]: INFO nova.compute.manager [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Took 0.07 seconds to destroy the instance on the hypervisor. [ 2266.823164] env[61663]: DEBUG oslo.service.loopingcall [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2266.823778] env[61663]: DEBUG nova.compute.manager [-] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2266.823778] env[61663]: DEBUG nova.network.neutron [-] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2266.833214] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aca5ca74-d895-4661-8457-c008e4f214ac tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "b58c2b47-6508-491c-ad2c-ac86e654c7ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.516s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.844013] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2266.880513] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2266.880678] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 440175fc-da0c-4ea3-9a74-46e97e32658b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2266.880806] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2266.880941] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2266.881066] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2266.881186] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2266.881340] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2266.881461] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2266.881577] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2266.891853] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2266.902080] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2266.903158] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2266.913490] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2266.924351] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance cbd92feb-f2a1-41cf-8552-8cd0b0b20f0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2266.934237] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2266.934468] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2266.934616] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2267.103757] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b22e6c-2eaf-4f55-a8fb-b2526a597b32 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.107038] env[61663]: DEBUG nova.network.neutron [-] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2267.113319] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd63dff-9c35-4f48-a48a-90049061135e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.119120] env[61663]: INFO nova.compute.manager [-] [instance: 46ec5076-51f1-4ac9-915e-0d98ee7b1d4c] Took 0.30 seconds to deallocate network for instance. 
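
The "Final resource view" arithmetic is consistent with the per-instance allocations listed just before it: nine instances each holding {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}, plus the 512 MB of MEMORY_MB the inventory reserves, give exactly used_vcpus=9, used_ram=1664MB and used_disk=9GB. A sketch of the summation (assuming, as the numbers suggest, that reserved memory is counted into used_ram):

# Nine tracked instances, each with the allocation shown in the log, plus the
# 512 MB of reserved MEMORY_MB from the inventory data.
allocations = [{"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}] * 9
reserved_mb = 512

used_vcpus = sum(a["VCPU"] for a in allocations)                   # 9
used_ram = reserved_mb + sum(a["MEMORY_MB"] for a in allocations)  # 512 + 1152 = 1664 MB
used_disk = sum(a["DISK_GB"] for a in allocations)                 # 9 GB

print(f"used_vcpus={used_vcpus} used_ram={used_ram}MB used_disk={used_disk}GB")
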
[ 2267.147856] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8424bc25-59b0-4b20-853f-9fc89a99a37e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.155368] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc9114e-c4fb-4fce-b65e-0790fd59c229 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.169932] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2267.182323] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2267.198575] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2267.198788] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.391s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.199109] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.297s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2267.200515] env[61663]: INFO nova.compute.claims [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2267.283884] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c968449f-ff43-4cb1-8d77-f0f88261df54 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "46ec5076-51f1-4ac9-915e-0d98ee7b1d4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.548s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.436050] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0715e2a4-c667-4c4a-bdfb-dc5d0e24c7a8 {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.443587] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f391cc-0e18-4f86-9a99-9a0835f53000 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.472376] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38ef366-4ba9-478c-ae09-961e106bfb08 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.479409] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018bfbe0-e155-407b-95a7-1c07c6250fe2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.492015] env[61663]: DEBUG nova.compute.provider_tree [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2267.500715] env[61663]: DEBUG nova.scheduler.client.report [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2267.516613] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.317s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.517258] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2267.551992] env[61663]: DEBUG nova.compute.utils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2267.553145] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Allocating IP information in the background. 
{{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2267.553384] env[61663]: DEBUG nova.network.neutron [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2267.561340] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2267.636980] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2267.640968] env[61663]: DEBUG nova.policy [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b47d6be1f88423099315c96105ebae5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b78db8dd3ba544fe840260d9d0ce34da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2267.663435] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2267.663704] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2267.663847] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 2267.664044] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2267.664299] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2267.664382] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2267.664662] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2267.664935] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2267.665148] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2267.665319] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2267.665498] env[61663]: DEBUG nova.virt.hardware [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2267.666380] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd085d5-f829-41ab-b325-dd583cc332a1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.674559] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d255b6-b1cb-44ab-bca3-423fb28d4e9e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.716183] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquiring lock "0adee33d-8d0c-4bcf-8df4-11465be00485" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2267.716502] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.056619] env[61663]: DEBUG nova.network.neutron [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Successfully created port: 5c23d488-0837-47cd-b717-14244da20b8f {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2268.729178] env[61663]: DEBUG nova.compute.manager [req-42ce802d-d1fc-4aa3-a760-94ba8aa80a6f req-0754f538-9167-4d5b-8bfe-5c896db233e5 service nova] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Received event network-vif-plugged-5c23d488-0837-47cd-b717-14244da20b8f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2268.729442] env[61663]: DEBUG oslo_concurrency.lockutils [req-42ce802d-d1fc-4aa3-a760-94ba8aa80a6f req-0754f538-9167-4d5b-8bfe-5c896db233e5 service nova] Acquiring lock "c21a5af5-004b-4544-bcf0-f105d6f336c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2268.729674] env[61663]: DEBUG oslo_concurrency.lockutils [req-42ce802d-d1fc-4aa3-a760-94ba8aa80a6f req-0754f538-9167-4d5b-8bfe-5c896db233e5 service nova] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.729980] env[61663]: DEBUG oslo_concurrency.lockutils [req-42ce802d-d1fc-4aa3-a760-94ba8aa80a6f req-0754f538-9167-4d5b-8bfe-5c896db233e5 service nova] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.730175] env[61663]: DEBUG nova.compute.manager [req-42ce802d-d1fc-4aa3-a760-94ba8aa80a6f req-0754f538-9167-4d5b-8bfe-5c896db233e5 service nova] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] No waiting events found dispatching network-vif-plugged-5c23d488-0837-47cd-b717-14244da20b8f {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2268.730347] env[61663]: WARNING nova.compute.manager [req-42ce802d-d1fc-4aa3-a760-94ba8aa80a6f req-0754f538-9167-4d5b-8bfe-5c896db233e5 service nova] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Received unexpected event network-vif-plugged-5c23d488-0837-47cd-b717-14244da20b8f for instance with vm_state building and task_state spawning. 
[ 2268.786032] env[61663]: DEBUG nova.network.neutron [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Successfully updated port: 5c23d488-0837-47cd-b717-14244da20b8f {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2268.798116] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquiring lock "refresh_cache-c21a5af5-004b-4544-bcf0-f105d6f336c9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2268.798867] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquired lock "refresh_cache-c21a5af5-004b-4544-bcf0-f105d6f336c9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2268.798867] env[61663]: DEBUG nova.network.neutron [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2268.899464] env[61663]: DEBUG nova.network.neutron [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2269.178860] env[61663]: DEBUG nova.network.neutron [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Updating instance_info_cache with network_info: [{"id": "5c23d488-0837-47cd-b717-14244da20b8f", "address": "fa:16:3e:c7:03:27", "network": {"id": "7e106604-3834-438a-b6cc-aa23a9c637a6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1265493516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b78db8dd3ba544fe840260d9d0ce34da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c23d488-08", "ovs_interfaceid": "5c23d488-0837-47cd-b717-14244da20b8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2269.192228] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 
tempest-ServerTagsTestJSON-431779464-project-member] Releasing lock "refresh_cache-c21a5af5-004b-4544-bcf0-f105d6f336c9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2269.192525] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Instance network_info: |[{"id": "5c23d488-0837-47cd-b717-14244da20b8f", "address": "fa:16:3e:c7:03:27", "network": {"id": "7e106604-3834-438a-b6cc-aa23a9c637a6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1265493516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b78db8dd3ba544fe840260d9d0ce34da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c23d488-08", "ovs_interfaceid": "5c23d488-0837-47cd-b717-14244da20b8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2269.192932] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:03:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f4399275-8e92-4448-be9e-d4984e93e89c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c23d488-0837-47cd-b717-14244da20b8f', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2269.200556] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Creating folder: Project (b78db8dd3ba544fe840260d9d0ce34da). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2269.201096] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd8003de-c8e2-486e-b1a2-7b277d4203fe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.211431] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Created folder: Project (b78db8dd3ba544fe840260d9d0ce34da) in parent group-v352575. [ 2269.211621] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Creating folder: Instances. 
Parent ref: group-v352671. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2269.211841] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-522411ae-35b8-423e-baab-17e0465c7500 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.221262] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Created folder: Instances in parent group-v352671. [ 2269.221489] env[61663]: DEBUG oslo.service.loopingcall [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2269.221664] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2269.221850] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13386946-8f59-4403-a445-878e2532ffbe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.239691] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2269.239691] env[61663]: value = "task-1690847" [ 2269.239691] env[61663]: _type = "Task" [ 2269.239691] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2269.247226] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690847, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2269.751085] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690847, 'name': CreateVM_Task, 'duration_secs': 0.284543} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2269.751425] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2269.751841] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2269.752019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2269.752346] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2269.752586] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3dcba5c-9de3-4ca3-b60d-a44105e042bc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.756685] env[61663]: DEBUG oslo_vmware.api [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Waiting for the task: (returnval){ [ 2269.756685] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5241cddb-0725-a828-74be-53d7d81c26d6" [ 2269.756685] env[61663]: _type = "Task" [ 2269.756685] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2269.763821] env[61663]: DEBUG oslo_vmware.api [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5241cddb-0725-a828-74be-53d7d81c26d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2270.267216] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2270.267481] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2270.267752] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2270.753405] env[61663]: DEBUG nova.compute.manager [req-f2b06794-62e0-40d0-bf32-0187cffb2788 req-9370210b-ff17-452b-a0cc-747d87018fe1 service nova] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Received event network-changed-5c23d488-0837-47cd-b717-14244da20b8f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2270.753668] env[61663]: DEBUG nova.compute.manager [req-f2b06794-62e0-40d0-bf32-0187cffb2788 req-9370210b-ff17-452b-a0cc-747d87018fe1 service nova] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Refreshing instance network info cache due to event network-changed-5c23d488-0837-47cd-b717-14244da20b8f. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2270.753771] env[61663]: DEBUG oslo_concurrency.lockutils [req-f2b06794-62e0-40d0-bf32-0187cffb2788 req-9370210b-ff17-452b-a0cc-747d87018fe1 service nova] Acquiring lock "refresh_cache-c21a5af5-004b-4544-bcf0-f105d6f336c9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2270.753917] env[61663]: DEBUG oslo_concurrency.lockutils [req-f2b06794-62e0-40d0-bf32-0187cffb2788 req-9370210b-ff17-452b-a0cc-747d87018fe1 service nova] Acquired lock "refresh_cache-c21a5af5-004b-4544-bcf0-f105d6f336c9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2270.754260] env[61663]: DEBUG nova.network.neutron [req-f2b06794-62e0-40d0-bf32-0187cffb2788 req-9370210b-ff17-452b-a0cc-747d87018fe1 service nova] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Refreshing network info cache for port 5c23d488-0837-47cd-b717-14244da20b8f {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2270.998382] env[61663]: DEBUG nova.network.neutron [req-f2b06794-62e0-40d0-bf32-0187cffb2788 req-9370210b-ff17-452b-a0cc-747d87018fe1 service nova] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Updated VIF entry in instance network info cache for port 5c23d488-0837-47cd-b717-14244da20b8f. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2270.998738] env[61663]: DEBUG nova.network.neutron [req-f2b06794-62e0-40d0-bf32-0187cffb2788 req-9370210b-ff17-452b-a0cc-747d87018fe1 service nova] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Updating instance_info_cache with network_info: [{"id": "5c23d488-0837-47cd-b717-14244da20b8f", "address": "fa:16:3e:c7:03:27", "network": {"id": "7e106604-3834-438a-b6cc-aa23a9c637a6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1265493516-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b78db8dd3ba544fe840260d9d0ce34da", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f4399275-8e92-4448-be9e-d4984e93e89c", "external-id": "nsx-vlan-transportzone-192", "segmentation_id": 192, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c23d488-08", "ovs_interfaceid": "5c23d488-0837-47cd-b717-14244da20b8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2271.008209] env[61663]: DEBUG oslo_concurrency.lockutils [req-f2b06794-62e0-40d0-bf32-0187cffb2788 req-9370210b-ff17-452b-a0cc-747d87018fe1 service nova] Releasing lock "refresh_cache-c21a5af5-004b-4544-bcf0-f105d6f336c9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2271.202316] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2271.202316] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2279.687780] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2283.589823] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.590136] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2293.755469] env[61663]: DEBUG oslo_concurrency.lockutils [None req-daad2c63-2259-418d-8d1e-139537dfebac tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "5002feba-1ffc-4957-ad11-712fce784ef4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2293.755772] env[61663]: DEBUG oslo_concurrency.lockutils [None req-daad2c63-2259-418d-8d1e-139537dfebac tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "5002feba-1ffc-4957-ad11-712fce784ef4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2303.616417] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquiring lock "c21a5af5-004b-4544-bcf0-f105d6f336c9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2313.467662] env[61663]: WARNING oslo_vmware.rw_handles [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles response.begin() 
[ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2313.467662] env[61663]: ERROR oslo_vmware.rw_handles [ 2313.468426] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/a485ae3c-ae60-42ee-b348-76d94739fd5a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2313.470009] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2313.470263] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Copying Virtual Disk [datastore1] vmware_temp/a485ae3c-ae60-42ee-b348-76d94739fd5a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/a485ae3c-ae60-42ee-b348-76d94739fd5a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2313.470543] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2acfd06c-c6f1-4384-af5e-03a8ed89b08b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.480399] env[61663]: DEBUG oslo_vmware.api [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Waiting for the task: (returnval){ [ 2313.480399] env[61663]: value = "task-1690848" [ 2313.480399] env[61663]: _type = "Task" [ 2313.480399] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2313.488444] env[61663]: DEBUG oslo_vmware.api [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Task: {'id': task-1690848, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2313.991510] env[61663]: DEBUG oslo_vmware.exceptions [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2313.991847] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2313.992515] env[61663]: ERROR nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2313.992515] env[61663]: Faults: ['InvalidArgument'] [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Traceback (most recent call last): [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] yield resources [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] self.driver.spawn(context, instance, image_meta, [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] self._fetch_image_if_missing(context, vi) [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] image_cache(vi, tmp_image_ds_loc) [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] vm_util.copy_virtual_disk( [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] session._wait_for_task(vmdk_copy_task) [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] return self.wait_for_task(task_ref) [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] return evt.wait() [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] result = hub.switch() [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] return self.greenlet.switch() [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] self.f(*self.args, **self.kw) [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] raise exceptions.translate_fault(task_info.error) [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Faults: ['InvalidArgument'] [ 2313.992515] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] [ 2313.993513] env[61663]: INFO nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Terminating instance [ 2313.994604] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2313.994870] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2313.995148] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dcd9974-efd0-4345-ad08-ea545edf0a09 
{{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2313.997810] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2313.998026] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2313.998856] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37ce295-5ff7-45a9-ae70-b16bff3baac6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.006472] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2314.007526] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdf79a9a-0fc9-4748-8d95-0cceb09ff1b7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.009055] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2314.009273] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2314.010025] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eae4b3c7-4f88-4b42-9bd9-a096d7caf130 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.015276] env[61663]: DEBUG oslo_vmware.api [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for the task: (returnval){ [ 2314.015276] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d22cb4-7083-63d3-b58c-cc11bc81c434" [ 2314.015276] env[61663]: _type = "Task" [ 2314.015276] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.022861] env[61663]: DEBUG oslo_vmware.api [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d22cb4-7083-63d3-b58c-cc11bc81c434, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.084927] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2314.085178] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2314.085362] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Deleting the datastore file [datastore1] b51a331f-2b96-457f-9c9e-99379e8ae7fb {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2314.085623] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26c3f323-d59c-4286-9d7b-94fcb1a294a0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.091901] env[61663]: DEBUG oslo_vmware.api [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Waiting for the task: (returnval){ [ 2314.091901] env[61663]: value = "task-1690850" [ 2314.091901] env[61663]: _type = "Task" [ 2314.091901] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2314.099448] env[61663]: DEBUG oslo_vmware.api [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Task: {'id': task-1690850, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2314.526098] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2314.526098] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Creating directory with path [datastore1] vmware_temp/f55e3fdc-3b85-40fa-b0b6-6476a46e47de/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2314.526098] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dc80b1e-eff4-4826-bc22-27b6e2ccc405 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.537126] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Created directory with path [datastore1] vmware_temp/f55e3fdc-3b85-40fa-b0b6-6476a46e47de/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2314.537317] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Fetch image to [datastore1] vmware_temp/f55e3fdc-3b85-40fa-b0b6-6476a46e47de/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2314.537485] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/f55e3fdc-3b85-40fa-b0b6-6476a46e47de/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2314.538193] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db8a542-ec02-4060-80f6-bcac8462228b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.544514] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bcb467-328b-4ab8-b671-a11b8c3caac0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.553049] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057357c4-7804-41a9-8bcd-b6d4f6afcf71 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.583232] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5c27c33a-d624-41cc-9a63-b2286c3e107f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.588308] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4f289d61-6c5b-433a-93bc-ceb386287d27 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.599862] env[61663]: DEBUG oslo_vmware.api [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Task: {'id': task-1690850, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072664} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2314.600091] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2314.600274] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2314.600441] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2314.600612] env[61663]: INFO nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2314.602615] env[61663]: DEBUG nova.compute.claims [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2314.602784] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2314.602992] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2314.608136] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2314.748823] env[61663]: DEBUG oslo_vmware.rw_handles [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f55e3fdc-3b85-40fa-b0b6-6476a46e47de/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2314.811270] env[61663]: DEBUG oslo_vmware.rw_handles [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2314.812333] env[61663]: DEBUG oslo_vmware.rw_handles [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f55e3fdc-3b85-40fa-b0b6-6476a46e47de/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2314.914724] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbff274-c139-4e0a-a72a-db93023113ce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.922348] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363a4df6-750e-47e6-b200-b524cc8b0034 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.951452] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad488f0-1786-471e-863f-084eede42fd5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.958284] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd71706-5f51-4733-972e-ed0a0d7a7f92 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2314.972057] env[61663]: DEBUG nova.compute.provider_tree [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2314.983034] env[61663]: DEBUG nova.scheduler.client.report [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2314.998836] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.395s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2314.998836] env[61663]: ERROR nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2314.998836] env[61663]: Faults: ['InvalidArgument'] [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Traceback (most recent call last): [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2314.998836] env[61663]: 
ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] self.driver.spawn(context, instance, image_meta, [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] self._fetch_image_if_missing(context, vi) [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] image_cache(vi, tmp_image_ds_loc) [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] vm_util.copy_virtual_disk( [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] session._wait_for_task(vmdk_copy_task) [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] return self.wait_for_task(task_ref) [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] return evt.wait() [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] result = hub.switch() [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] return self.greenlet.switch() [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] self.f(*self.args, **self.kw) [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] raise exceptions.translate_fault(task_info.error) [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Faults: ['InvalidArgument'] [ 2314.998836] env[61663]: ERROR nova.compute.manager [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] [ 2314.999816] env[61663]: DEBUG nova.compute.utils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2315.000603] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Build of instance b51a331f-2b96-457f-9c9e-99379e8ae7fb was re-scheduled: A specified parameter was not correct: fileType [ 2315.000603] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2315.001015] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2315.001221] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2315.001400] env[61663]: DEBUG nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2315.001561] env[61663]: DEBUG nova.network.neutron [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2315.429131] env[61663]: DEBUG nova.network.neutron [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2315.443583] env[61663]: INFO nova.compute.manager [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Took 0.44 seconds to deallocate network for instance. [ 2315.536879] env[61663]: INFO nova.scheduler.client.report [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Deleted allocations for instance b51a331f-2b96-457f-9c9e-99379e8ae7fb [ 2315.557068] env[61663]: DEBUG oslo_concurrency.lockutils [None req-08d21e4f-6d7d-42a0-ae71-21bc29daf051 tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 629.916s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.558211] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 433.645s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2315.558442] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Acquiring lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2315.558652] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: 
waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2315.558848] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.560973] env[61663]: INFO nova.compute.manager [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Terminating instance [ 2315.562634] env[61663]: DEBUG nova.compute.manager [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2315.562834] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2315.563326] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2badb85-8f0c-4f28-a938-3876db3b9c29 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.572908] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e9c77e-d962-4849-9466-42b2cb73bc4e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.583486] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2315.603011] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b51a331f-2b96-457f-9c9e-99379e8ae7fb could not be found. [ 2315.603245] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2315.603418] env[61663]: INFO nova.compute.manager [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2315.603652] env[61663]: DEBUG oslo.service.loopingcall [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2315.603872] env[61663]: DEBUG nova.compute.manager [-] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2315.603970] env[61663]: DEBUG nova.network.neutron [-] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2315.628777] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2315.629048] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2315.630594] env[61663]: INFO nova.compute.claims [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2315.634042] env[61663]: DEBUG nova.network.neutron [-] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2315.642197] env[61663]: INFO nova.compute.manager [-] [instance: b51a331f-2b96-457f-9c9e-99379e8ae7fb] Took 0.04 seconds to deallocate network for instance. 
[ 2315.741929] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e1dca3d1-734a-48b0-b149-4cd7009daf3a tempest-ServersNegativeTestJSON-287410550 tempest-ServersNegativeTestJSON-287410550-project-member] Lock "b51a331f-2b96-457f-9c9e-99379e8ae7fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.874393] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dff6310-96ff-4dbc-886d-74ee5197c8e4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.881901] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccaf347-15a7-44fb-9ffa-1b274eabfbb5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.910957] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3f3984-c316-499b-b63a-07ede5de2d30 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.917665] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1092377-f2e6-44d0-a71b-571f56c2134b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.930059] env[61663]: DEBUG nova.compute.provider_tree [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2315.938738] env[61663]: DEBUG nova.scheduler.client.report [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2315.952553] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.323s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2315.953022] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2315.986736] env[61663]: DEBUG nova.compute.utils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2315.988199] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2315.988374] env[61663]: DEBUG nova.network.neutron [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2315.997339] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2316.058896] env[61663]: DEBUG nova.policy [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f50378c5b326455197df095cae766a35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '006ceb0b9457465daa8ad6d60e85c1f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2316.061985] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2316.086480] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2316.086724] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2316.086899] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2316.087165] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2316.087326] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2316.087477] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2316.087687] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2316.087849] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2316.088028] env[61663]: DEBUG nova.virt.hardware [None 
req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2316.088202] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2316.088378] env[61663]: DEBUG nova.virt.hardware [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2316.089257] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07706317-c4ab-4a33-86d1-755d94cc4ccd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.096705] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8ba009-3978-41c6-9cc8-4c06df75f523 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.536113] env[61663]: DEBUG nova.network.neutron [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Successfully created port: 86504641-ef42-48a7-bc06-6c0ed7c9e72b {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2317.178137] env[61663]: DEBUG nova.network.neutron [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Successfully updated port: 86504641-ef42-48a7-bc06-6c0ed7c9e72b {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2317.193859] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "refresh_cache-202e0f58-b057-4e57-8a92-c06d6efda570" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2317.194029] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquired lock "refresh_cache-202e0f58-b057-4e57-8a92-c06d6efda570" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2317.194185] env[61663]: DEBUG nova.network.neutron [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2317.259333] env[61663]: DEBUG nova.network.neutron [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 
tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2317.502486] env[61663]: DEBUG nova.network.neutron [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Updating instance_info_cache with network_info: [{"id": "86504641-ef42-48a7-bc06-6c0ed7c9e72b", "address": "fa:16:3e:75:de:ce", "network": {"id": "0022b0c5-4b0f-4449-a9f5-4008ee2c4e6d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-70195467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "006ceb0b9457465daa8ad6d60e85c1f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86504641-ef", "ovs_interfaceid": "86504641-ef42-48a7-bc06-6c0ed7c9e72b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2317.516174] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Releasing lock "refresh_cache-202e0f58-b057-4e57-8a92-c06d6efda570" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2317.516515] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Instance network_info: |[{"id": "86504641-ef42-48a7-bc06-6c0ed7c9e72b", "address": "fa:16:3e:75:de:ce", "network": {"id": "0022b0c5-4b0f-4449-a9f5-4008ee2c4e6d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-70195467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "006ceb0b9457465daa8ad6d60e85c1f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86504641-ef", "ovs_interfaceid": "86504641-ef42-48a7-bc06-6c0ed7c9e72b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2317.516907] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:de:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f246b87-f105-4b33-a71d-5caf8e99e074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86504641-ef42-48a7-bc06-6c0ed7c9e72b', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2317.524454] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Creating folder: Project (006ceb0b9457465daa8ad6d60e85c1f3). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2317.525105] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe338514-f8c7-4928-b694-eab3103d977a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.537980] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Created folder: Project (006ceb0b9457465daa8ad6d60e85c1f3) in parent group-v352575. [ 2317.538187] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Creating folder: Instances. Parent ref: group-v352674. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2317.538414] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-639e78d4-1341-4378-8d66-53a246c3df6e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.547940] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Created folder: Instances in parent group-v352674. [ 2317.548184] env[61663]: DEBUG oslo.service.loopingcall [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2317.548369] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2317.548561] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7a9bd5d-3c53-4b50-8259-b49ac317b0ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2317.565191] env[61663]: DEBUG nova.compute.manager [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Received event network-vif-plugged-86504641-ef42-48a7-bc06-6c0ed7c9e72b {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2317.565390] env[61663]: DEBUG oslo_concurrency.lockutils [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] Acquiring lock "202e0f58-b057-4e57-8a92-c06d6efda570-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2317.565649] env[61663]: DEBUG oslo_concurrency.lockutils [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] Lock "202e0f58-b057-4e57-8a92-c06d6efda570-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2317.565776] env[61663]: DEBUG oslo_concurrency.lockutils [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] Lock "202e0f58-b057-4e57-8a92-c06d6efda570-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2317.566450] env[61663]: DEBUG nova.compute.manager [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] No waiting events found dispatching network-vif-plugged-86504641-ef42-48a7-bc06-6c0ed7c9e72b {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2317.566450] env[61663]: WARNING nova.compute.manager [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Received unexpected event network-vif-plugged-86504641-ef42-48a7-bc06-6c0ed7c9e72b for instance with vm_state building and task_state spawning. [ 2317.566450] env[61663]: DEBUG nova.compute.manager [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Received event network-changed-86504641-ef42-48a7-bc06-6c0ed7c9e72b {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2317.566450] env[61663]: DEBUG nova.compute.manager [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Refreshing instance network info cache due to event network-changed-86504641-ef42-48a7-bc06-6c0ed7c9e72b. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2317.566816] env[61663]: DEBUG oslo_concurrency.lockutils [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] Acquiring lock "refresh_cache-202e0f58-b057-4e57-8a92-c06d6efda570" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2317.566816] env[61663]: DEBUG oslo_concurrency.lockutils [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] Acquired lock "refresh_cache-202e0f58-b057-4e57-8a92-c06d6efda570" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2317.566816] env[61663]: DEBUG nova.network.neutron [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Refreshing network info cache for port 86504641-ef42-48a7-bc06-6c0ed7c9e72b {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2317.573243] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2317.573243] env[61663]: value = "task-1690853" [ 2317.573243] env[61663]: _type = "Task" [ 2317.573243] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2317.583607] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690853, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.082841] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690853, 'name': CreateVM_Task, 'duration_secs': 0.308172} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2318.083014] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2318.083679] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2318.083841] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2318.084172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2318.084414] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62eb6071-7989-4de7-bf06-23aa4b346ba9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2318.088783] env[61663]: DEBUG oslo_vmware.api [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Waiting for the task: (returnval){ [ 2318.088783] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523315ac-5fb7-9371-4e70-2fe8cb51cf3c" [ 2318.088783] env[61663]: _type = "Task" [ 2318.088783] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2318.096085] env[61663]: DEBUG oslo_vmware.api [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523315ac-5fb7-9371-4e70-2fe8cb51cf3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2318.155229] env[61663]: DEBUG nova.network.neutron [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Updated VIF entry in instance network info cache for port 86504641-ef42-48a7-bc06-6c0ed7c9e72b. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2318.155623] env[61663]: DEBUG nova.network.neutron [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Updating instance_info_cache with network_info: [{"id": "86504641-ef42-48a7-bc06-6c0ed7c9e72b", "address": "fa:16:3e:75:de:ce", "network": {"id": "0022b0c5-4b0f-4449-a9f5-4008ee2c4e6d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-70195467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "006ceb0b9457465daa8ad6d60e85c1f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86504641-ef", "ovs_interfaceid": "86504641-ef42-48a7-bc06-6c0ed7c9e72b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2318.165681] env[61663]: DEBUG oslo_concurrency.lockutils [req-b27eed5f-6f5a-4b2c-a81d-e774ad8f0f57 req-9c8f5f75-bcfc-4c68-a137-1bea0bfd066c service nova] Releasing lock "refresh_cache-202e0f58-b057-4e57-8a92-c06d6efda570" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2318.599064] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2318.599311] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2318.599436] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2318.691547] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2318.691762] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2323.687684] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.693009] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.693009] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2325.693009] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2325.717432] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.717598] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.717727] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.717880] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.718033] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.718165] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.718494] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.718494] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.718594] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.718705] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2325.718825] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2325.719315] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2325.893707] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2325.893930] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2326.692139] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2326.692338] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2328.692452] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2328.704389] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None 
None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.704618] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.704783] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2328.704937] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2328.706600] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd3d7b7-ca94-4dcc-9acf-e0b9737e4f43 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.715237] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe6dad0-ea43-40dd-a5c9-7428171a9f0a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.729114] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567713ff-0f1e-435f-a77a-f542e18e9846 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.736029] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d331989-5037-4fa8-af8e-b526a70240ec {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.763908] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181277MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2328.764065] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.764254] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2328.836575] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 
440175fc-da0c-4ea3-9a74-46e97e32658b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.836738] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.836879] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.837015] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.837145] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.837264] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.837382] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.837497] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.837611] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.837722] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2328.848715] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2328.859615] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance cbd92feb-f2a1-41cf-8552-8cd0b0b20f0a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2328.870235] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2328.880369] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2328.889679] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2328.899393] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5002feba-1ffc-4957-ad11-712fce784ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2328.908477] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2328.908694] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2328.908842] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2329.095981] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f087100-ad99-4b93-915e-d218a6087785 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.103669] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9691e2-2c15-4f18-8419-8807522efe42 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.147849] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1821e2ca-09ad-46c9-9638-05f2114d202c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.155501] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf014a00-7e9a-48f5-9bd7-663418eddd26 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.168112] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2329.176247] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2329.189366] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2329.189559] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.425s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2329.233496] env[61663]: DEBUG oslo_concurrency.lockutils [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "202e0f58-b057-4e57-8a92-c06d6efda570" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.191287] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.191287] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2360.833955] env[61663]: WARNING oslo_vmware.rw_handles [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2360.833955] env[61663]: ERROR oslo_vmware.rw_handles [ 2360.834543] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/f55e3fdc-3b85-40fa-b0b6-6476a46e47de/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2360.836419] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None 
req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2360.836664] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Copying Virtual Disk [datastore1] vmware_temp/f55e3fdc-3b85-40fa-b0b6-6476a46e47de/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/f55e3fdc-3b85-40fa-b0b6-6476a46e47de/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2360.836940] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41717419-942d-4b54-a430-1798673ed910 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.845611] env[61663]: DEBUG oslo_vmware.api [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for the task: (returnval){ [ 2360.845611] env[61663]: value = "task-1690854" [ 2360.845611] env[61663]: _type = "Task" [ 2360.845611] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.853698] env[61663]: DEBUG oslo_vmware.api [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': task-1690854, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.355524] env[61663]: DEBUG oslo_vmware.exceptions [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2361.355892] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2361.356492] env[61663]: ERROR nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2361.356492] env[61663]: Faults: ['InvalidArgument'] [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Traceback (most recent call last): [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] yield resources [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] self.driver.spawn(context, instance, image_meta, [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] self._fetch_image_if_missing(context, vi) [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] image_cache(vi, tmp_image_ds_loc) [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] vm_util.copy_virtual_disk( [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] session._wait_for_task(vmdk_copy_task) [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] return self.wait_for_task(task_ref) [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] return evt.wait() [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] result = hub.switch() [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] return self.greenlet.switch() [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] self.f(*self.args, **self.kw) [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] raise exceptions.translate_fault(task_info.error) [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Faults: ['InvalidArgument'] [ 2361.356492] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] [ 2361.357435] env[61663]: INFO nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Terminating instance [ 2361.358468] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2361.358681] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2361.358923] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-68917d72-9a1a-4aa0-91fd-0fc6801ac1a5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.361269] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2361.361466] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2361.362230] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95204aa2-ddc9-41cc-8a6e-b1b713cfc4b2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.369009] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2361.369979] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f948361-73d9-4a90-9222-07c467573169 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.371363] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2361.371535] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2361.372194] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95aec09d-b47e-4e7c-832d-ced129eb8cb7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.377512] env[61663]: DEBUG oslo_vmware.api [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Waiting for the task: (returnval){ [ 2361.377512] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5276fac1-98b1-1b24-d672-c037e52bda4c" [ 2361.377512] env[61663]: _type = "Task" [ 2361.377512] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.384894] env[61663]: DEBUG oslo_vmware.api [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5276fac1-98b1-1b24-d672-c037e52bda4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.575162] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2361.575348] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2361.575530] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Deleting the datastore file [datastore1] 440175fc-da0c-4ea3-9a74-46e97e32658b {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2361.575792] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-505490e2-57e9-4ee5-bb78-777bc9ad57bd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.582455] env[61663]: DEBUG oslo_vmware.api [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for the task: (returnval){ [ 2361.582455] env[61663]: value = "task-1690856" [ 2361.582455] env[61663]: _type = "Task" [ 2361.582455] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2361.589624] env[61663]: DEBUG oslo_vmware.api [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': task-1690856, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2361.888710] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2361.888710] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Creating directory with path [datastore1] vmware_temp/a1845394-68d1-421f-80d2-bed3a08eec7a/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2361.888710] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06a38434-e315-44df-b72b-55c4a25533bd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.907877] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Created directory with path [datastore1] vmware_temp/a1845394-68d1-421f-80d2-bed3a08eec7a/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2361.908130] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Fetch image to [datastore1] vmware_temp/a1845394-68d1-421f-80d2-bed3a08eec7a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2361.908278] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/a1845394-68d1-421f-80d2-bed3a08eec7a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2361.909190] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db6fbba-78ca-42e3-9445-d2a10d813b7a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.916703] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7597d2-bb61-4f89-87f7-1d0d5a7e105c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.926459] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ef4ba4-13ed-468b-9279-f408da7f6c1b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.958986] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-978fb5ef-4051-4803-80d0-5223a9179881 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.965573] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7fc514c6-9506-4bc6-a86b-70c87494743c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.987242] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2362.042019] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a1845394-68d1-421f-80d2-bed3a08eec7a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2362.104756] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2362.104951] env[61663]: DEBUG oslo_vmware.rw_handles [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a1845394-68d1-421f-80d2-bed3a08eec7a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2362.108824] env[61663]: DEBUG oslo_vmware.api [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Task: {'id': task-1690856, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097544} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2362.109095] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2362.109287] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2362.109479] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2362.109656] env[61663]: INFO nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Took 0.75 seconds to destroy the instance on the hypervisor. [ 2362.111877] env[61663]: DEBUG nova.compute.claims [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2362.112075] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.112305] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.357255] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe630695-2d3e-4c91-929b-4d61ff51fde2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.364765] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d144d80f-fb03-49d9-b615-22796c7c5a4a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.397268] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec512ae6-4b39-43ef-bf4b-6499d834f63c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.404469] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad32124-ab7a-4e9b-982f-1c873706ee03 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.417863] env[61663]: DEBUG nova.compute.provider_tree [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2362.427367] env[61663]: DEBUG nova.scheduler.client.report [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2362.441575] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.329s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.442126] env[61663]: ERROR nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2362.442126] env[61663]: Faults: ['InvalidArgument'] [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Traceback (most recent call last): [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] self.driver.spawn(context, instance, image_meta, [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] self._fetch_image_if_missing(context, vi) [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing 
[ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] image_cache(vi, tmp_image_ds_loc) [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] vm_util.copy_virtual_disk( [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] session._wait_for_task(vmdk_copy_task) [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] return self.wait_for_task(task_ref) [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] return evt.wait() [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] result = hub.switch() [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] return self.greenlet.switch() [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] self.f(*self.args, **self.kw) [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] raise exceptions.translate_fault(task_info.error) [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Faults: ['InvalidArgument'] [ 2362.442126] env[61663]: ERROR nova.compute.manager [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] [ 2362.443221] env[61663]: DEBUG nova.compute.utils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] VimFaultException {{(pid=61663) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 2362.444254] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Build of instance 440175fc-da0c-4ea3-9a74-46e97e32658b was re-scheduled: A specified parameter was not correct: fileType [ 2362.444254] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2362.444680] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2362.444869] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2362.445075] env[61663]: DEBUG nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2362.445249] env[61663]: DEBUG nova.network.neutron [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2362.788779] env[61663]: DEBUG nova.network.neutron [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2362.800656] env[61663]: INFO nova.compute.manager [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Took 0.36 seconds to deallocate network for instance.
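
The two tracebacks above bottom out in the same polling machinery: wait_for_task parks the greenthread on an event while a looping call drives _poll_task, and when the vCenter task ends in an error state the translated fault (here VimFaultException: "A specified parameter was not correct: fileType") is raised back to the waiter. A minimal sketch of that polling pattern, in plain Python with a caller-supplied status lookup instead of the oslo_vmware internals (TaskFault, get_info, and the interval/timeout defaults are illustrative assumptions, not the library API):

import time

class TaskFault(Exception):
    """Stand-in for the translated fault (assumption, not oslo_vmware's class)."""

def wait_for_task(task_ref, get_info, poll_interval=0.5, timeout=300.0):
    """Poll a remote task until success; raise on error or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_info(task_ref)  # caller-supplied status lookup (assumption)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # The equivalent step in the traceback is _poll_task raising the
            # fault translated from task_info.error.
            raise TaskFault(info['error'])
        time.sleep(poll_interval)
    raise TimeoutError('task %s did not complete within %ss' % (task_ref, timeout))

# Example: wait_for_task('task-1690854', lambda ref: {'state': 'success'})
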
[ 2362.908457] env[61663]: INFO nova.scheduler.client.report [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Deleted allocations for instance 440175fc-da0c-4ea3-9a74-46e97e32658b [ 2362.933705] env[61663]: DEBUG oslo_concurrency.lockutils [None req-23250f42-ebcd-4e79-9d38-8e58ef38397d tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "440175fc-da0c-4ea3-9a74-46e97e32658b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.285s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.935254] env[61663]: DEBUG oslo_concurrency.lockutils [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "440175fc-da0c-4ea3-9a74-46e97e32658b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.321s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.936026] env[61663]: DEBUG oslo_concurrency.lockutils [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Acquiring lock "440175fc-da0c-4ea3-9a74-46e97e32658b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2362.936026] env[61663]: DEBUG oslo_concurrency.lockutils [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "440175fc-da0c-4ea3-9a74-46e97e32658b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2362.936026] env[61663]: DEBUG oslo_concurrency.lockutils [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "440175fc-da0c-4ea3-9a74-46e97e32658b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2362.938308] env[61663]: INFO nova.compute.manager [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Terminating instance [ 2362.940428] env[61663]: DEBUG nova.compute.manager [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2362.940822] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2362.941042] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-38cf47b8-a142-4110-bdac-b9e4f5fb9d4e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.945166] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2362.951626] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8d03b3-5263-4510-adb2-876b8d1d4755 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2362.981369] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 440175fc-da0c-4ea3-9a74-46e97e32658b could not be found. [ 2362.981591] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2362.981804] env[61663]: INFO nova.compute.manager [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2362.982478] env[61663]: DEBUG oslo.service.loopingcall [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2362.982478] env[61663]: DEBUG nova.compute.manager [-] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2362.982478] env[61663]: DEBUG nova.network.neutron [-] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2363.003339] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2363.003744] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2363.006049] env[61663]: INFO nova.compute.claims [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2363.019755] env[61663]: DEBUG nova.network.neutron [-] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2363.037022] env[61663]: INFO nova.compute.manager [-] [instance: 440175fc-da0c-4ea3-9a74-46e97e32658b] Took 0.05 seconds to deallocate network for instance. 
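
The teardown above is deliberately tolerant: the repeat terminate finds no VM on the backend, downgrades InstanceNotFound to a warning, still marks the instance destroyed, and hands network deallocation to a retrying looping call (_deallocate_network_with_retries). A minimal sketch of that shape, with stand-in callables for the backend unregister and the network cleanup (none of this is the Nova implementation; the retry count and delay are assumptions):

import time

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound (assumption)."""

def destroy_instance(uuid, unregister_vm, deallocate_network, retries=3, delay=1.0):
    """Destroy a VM, treating an already-missing backend VM as success."""
    try:
        unregister_vm(uuid)  # backend call; may raise InstanceNotFound
    except InstanceNotFound:
        # The hypervisor no longer knows the VM, so deletion is effectively
        # done; continue with cleanup instead of failing the terminate.
        pass
    for attempt in range(1, retries + 1):
        try:
            deallocate_network(uuid)  # e.g. port/allocation cleanup
            return
        except Exception:
            if attempt == retries:
                raise
            time.sleep(delay)  # simple fixed backoff for the sketch
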
[ 2363.144910] env[61663]: DEBUG oslo_concurrency.lockutils [None req-54ab643d-05e4-4478-b35d-b4ad59e9b85f tempest-VolumesAdminNegativeTest-562078281 tempest-VolumesAdminNegativeTest-562078281-project-member] Lock "440175fc-da0c-4ea3-9a74-46e97e32658b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.210s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.333566] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2cf9f4-7a05-4c15-8949-17dec13370c2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.344538] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97589eb3-4b83-43bd-8dd8-10d7903e1246 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.395223] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77983c03-f5b1-49a9-a249-4b1345209583 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.406606] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38dec720-61c7-4636-bc2e-65ac9bced1d2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.428297] env[61663]: DEBUG nova.compute.provider_tree [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2363.442329] env[61663]: DEBUG nova.scheduler.client.report [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2363.458471] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.455s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2363.459328] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2363.519830] env[61663]: DEBUG nova.compute.utils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2363.521233] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2363.521401] env[61663]: DEBUG nova.network.neutron [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2363.532427] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2363.589267] env[61663]: DEBUG nova.policy [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2a63b25b9a4f480fba63cfe5f1a7d1dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7468e2e2e93447aa891442c977611a85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2363.599247] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2363.625449] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2363.625704] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2363.625865] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2363.626062] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2363.626217] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2363.626369] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2363.626576] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2363.626739] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2363.626912] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2363.627098] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2363.627280] env[61663]: DEBUG nova.virt.hardware [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2363.628178] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d00032-593c-455c-bc47-7a58a1416570 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2363.636940] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9b14e4-0744-4313-bbff-e13482371558 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2364.314120] env[61663]: DEBUG nova.network.neutron [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Successfully created port: 59b07ef8-2d1c-4666-acb1-8705c907b090 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2364.995669] env[61663]: DEBUG nova.compute.manager [req-2b487244-29d7-4eea-a953-44f2a081a855 req-eb25515f-b17b-4198-96ee-9e3679333aee service nova] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Received event network-vif-plugged-59b07ef8-2d1c-4666-acb1-8705c907b090 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2364.995918] env[61663]: DEBUG oslo_concurrency.lockutils [req-2b487244-29d7-4eea-a953-44f2a081a855 req-eb25515f-b17b-4198-96ee-9e3679333aee service nova] Acquiring lock "67f8162d-a631-4f0e-b03c-fd76ee131615-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2364.996124] env[61663]: DEBUG oslo_concurrency.lockutils [req-2b487244-29d7-4eea-a953-44f2a081a855 req-eb25515f-b17b-4198-96ee-9e3679333aee service nova] Lock "67f8162d-a631-4f0e-b03c-fd76ee131615-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2364.996339] env[61663]: DEBUG oslo_concurrency.lockutils [req-2b487244-29d7-4eea-a953-44f2a081a855 req-eb25515f-b17b-4198-96ee-9e3679333aee service nova] Lock "67f8162d-a631-4f0e-b03c-fd76ee131615-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2364.996516] env[61663]: DEBUG nova.compute.manager [req-2b487244-29d7-4eea-a953-44f2a081a855 req-eb25515f-b17b-4198-96ee-9e3679333aee service nova] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] No waiting events found dispatching network-vif-plugged-59b07ef8-2d1c-4666-acb1-8705c907b090 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2364.996682] env[61663]: WARNING nova.compute.manager [req-2b487244-29d7-4eea-a953-44f2a081a855 req-eb25515f-b17b-4198-96ee-9e3679333aee service nova] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Received unexpected event network-vif-plugged-59b07ef8-2d1c-4666-acb1-8705c907b090 for instance with vm_state building and task_state spawning. [ 2365.075944] env[61663]: DEBUG nova.network.neutron [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Successfully updated port: 59b07ef8-2d1c-4666-acb1-8705c907b090 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2365.088620] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquiring lock "refresh_cache-67f8162d-a631-4f0e-b03c-fd76ee131615" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2365.089343] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquired lock "refresh_cache-67f8162d-a631-4f0e-b03c-fd76ee131615" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2365.089343] env[61663]: DEBUG nova.network.neutron [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2365.137706] env[61663]: DEBUG nova.network.neutron [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2365.366043] env[61663]: DEBUG nova.network.neutron [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Updating instance_info_cache with network_info: [{"id": "59b07ef8-2d1c-4666-acb1-8705c907b090", "address": "fa:16:3e:1d:c2:c3", "network": {"id": "962f87e5-49fe-43c0-ba2d-235091983c64", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1077916989-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7468e2e2e93447aa891442c977611a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59b07ef8-2d", "ovs_interfaceid": "59b07ef8-2d1c-4666-acb1-8705c907b090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2365.378166] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Releasing lock "refresh_cache-67f8162d-a631-4f0e-b03c-fd76ee131615" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2365.378454] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Instance network_info: |[{"id": "59b07ef8-2d1c-4666-acb1-8705c907b090", "address": "fa:16:3e:1d:c2:c3", "network": {"id": "962f87e5-49fe-43c0-ba2d-235091983c64", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1077916989-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7468e2e2e93447aa891442c977611a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59b07ef8-2d", "ovs_interfaceid": "59b07ef8-2d1c-4666-acb1-8705c907b090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2365.378869] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:c2:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1bf71001-973b-4fda-b804-ee6abcd12776', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59b07ef8-2d1c-4666-acb1-8705c907b090', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2365.386820] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Creating folder: Project (7468e2e2e93447aa891442c977611a85). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2365.387365] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66f67681-88a8-49b5-9335-8647143c0797 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.399868] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Created folder: Project (7468e2e2e93447aa891442c977611a85) in parent group-v352575. [ 2365.400069] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Creating folder: Instances. Parent ref: group-v352677. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2365.400306] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-314f817b-2639-415c-b2d4-cdd0d3e80047 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.412047] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Created folder: Instances in parent group-v352677. [ 2365.412047] env[61663]: DEBUG oslo.service.loopingcall [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2365.412047] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2365.412047] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c86ca0d-fcbf-422e-9b87-30755b11c880 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.430870] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2365.430870] env[61663]: value = "task-1690859" [ 2365.430870] env[61663]: _type = "Task" [ 2365.430870] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2365.439788] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690859, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2365.940914] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690859, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2366.441989] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690859, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2366.942244] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690859, 'name': CreateVM_Task, 'duration_secs': 1.022133} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2366.942408] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2366.943080] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2366.943267] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2366.943597] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2366.943846] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f12d4b91-3f51-4d09-8d1a-0f6eb983c5cf {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.947962] env[61663]: DEBUG oslo_vmware.api [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Waiting for the task: (returnval){ [ 2366.947962] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b6e36a-ff19-0f2f-fe42-e0860ade8fcf" [ 2366.947962] env[61663]: _type = "Task" [ 2366.947962] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2366.954932] env[61663]: DEBUG oslo_vmware.api [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b6e36a-ff19-0f2f-fe42-e0860ade8fcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2367.021921] env[61663]: DEBUG nova.compute.manager [req-ccb1e1e3-959b-4fc4-8b39-5b7a481846be req-2fb4bde0-25bd-4845-8033-40645d5281ec service nova] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Received event network-changed-59b07ef8-2d1c-4666-acb1-8705c907b090 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2367.022161] env[61663]: DEBUG nova.compute.manager [req-ccb1e1e3-959b-4fc4-8b39-5b7a481846be req-2fb4bde0-25bd-4845-8033-40645d5281ec service nova] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Refreshing instance network info cache due to event network-changed-59b07ef8-2d1c-4666-acb1-8705c907b090. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2367.022483] env[61663]: DEBUG oslo_concurrency.lockutils [req-ccb1e1e3-959b-4fc4-8b39-5b7a481846be req-2fb4bde0-25bd-4845-8033-40645d5281ec service nova] Acquiring lock "refresh_cache-67f8162d-a631-4f0e-b03c-fd76ee131615" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2367.022718] env[61663]: DEBUG oslo_concurrency.lockutils [req-ccb1e1e3-959b-4fc4-8b39-5b7a481846be req-2fb4bde0-25bd-4845-8033-40645d5281ec service nova] Acquired lock "refresh_cache-67f8162d-a631-4f0e-b03c-fd76ee131615" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2367.022952] env[61663]: DEBUG nova.network.neutron [req-ccb1e1e3-959b-4fc4-8b39-5b7a481846be req-2fb4bde0-25bd-4845-8033-40645d5281ec service nova] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Refreshing network info cache for port 59b07ef8-2d1c-4666-acb1-8705c907b090 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2367.313357] env[61663]: DEBUG nova.network.neutron [req-ccb1e1e3-959b-4fc4-8b39-5b7a481846be req-2fb4bde0-25bd-4845-8033-40645d5281ec service nova] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Updated VIF entry in instance network info cache for port 59b07ef8-2d1c-4666-acb1-8705c907b090. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2367.313771] env[61663]: DEBUG nova.network.neutron [req-ccb1e1e3-959b-4fc4-8b39-5b7a481846be req-2fb4bde0-25bd-4845-8033-40645d5281ec service nova] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Updating instance_info_cache with network_info: [{"id": "59b07ef8-2d1c-4666-acb1-8705c907b090", "address": "fa:16:3e:1d:c2:c3", "network": {"id": "962f87e5-49fe-43c0-ba2d-235091983c64", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1077916989-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7468e2e2e93447aa891442c977611a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1bf71001-973b-4fda-b804-ee6abcd12776", "external-id": "nsx-vlan-transportzone-498", "segmentation_id": 498, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59b07ef8-2d", "ovs_interfaceid": "59b07ef8-2d1c-4666-acb1-8705c907b090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2367.323520] env[61663]: DEBUG oslo_concurrency.lockutils [req-ccb1e1e3-959b-4fc4-8b39-5b7a481846be req-2fb4bde0-25bd-4845-8033-40645d5281ec service nova] Releasing lock "refresh_cache-67f8162d-a631-4f0e-b03c-fd76ee131615" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2367.458498] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2367.458774] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2367.458974] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2371.036441] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquiring lock "67f8162d-a631-4f0e-b03c-fd76ee131615" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.115567] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.115567] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2377.064784] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2377.065227] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Getting list of instances from cluster (obj){ [ 2377.065227] env[61663]: value = "domain-c8" [ 2377.065227] env[61663]: _type = "ClusterComputeResource" [ 2377.065227] env[61663]: } {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2377.066283] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab49a86-74a0-4679-8f44-f2da924d0acc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.085681] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Got total of 10 instances {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2378.731715] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2378.732121] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2384.689491] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.693060] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.693060] 
env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2386.693060] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2386.714115] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.714265] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.714394] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.714518] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.714642] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.714765] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.714883] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.715009] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.715135] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.715250] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2386.715365] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2386.715788] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2386.715985] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2387.698621] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2388.691975] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2388.692398] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2388.706230] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2388.706537] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2388.706623] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2388.706763] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2388.707866] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6dc0d7-7900-4d4d-b508-e63a2180af8d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.716341] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f370bc22-3638-4baa-adab-6ab2c2399bf2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.729617] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46068cd-d392-4469-87b2-d141917d0fdd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.735528] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78fa0250-dc7b-4b23-adc5-501edf154860 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.764575] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181316MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2388.764724] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2388.764910] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 668c457f-7ebc-441f-8ece-cc63c571363b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.881861] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2388.896186] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2388.907500] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2388.916082] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2388.926024] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5002feba-1ffc-4957-ad11-712fce784ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2388.934884] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2388.944242] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2388.944438] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2388.944588] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2388.960427] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing inventories for resource provider b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2388.975050] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating ProviderTree inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2388.975174] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2388.985856] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing aggregate associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, aggregates: None {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2389.003905] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing trait associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2389.168313] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca72a7eb-0d04-442f-885e-549bc896dd3a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.175592] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7d7c4c64-bc65-4c18-9945-26d86935bfe5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.206243] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdfbee98-9ac9-43e0-8e6e-9f1e0be5eed1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.213307] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89bbf222-6cda-4ff5-9e06-3572af454683 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2389.225901] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2389.235643] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2389.250483] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2389.250720] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.486s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2389.692659] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2389.692832] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2389.701822] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] There are 0 instances to clean {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2390.692634] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2390.692883] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances with incomplete migration 
{{(pid=61663) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2393.702969] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2393.703278] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2399.688910] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2408.498059] env[61663]: WARNING oslo_vmware.rw_handles [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2408.498059] env[61663]: ERROR oslo_vmware.rw_handles [ 2408.498854] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/a1845394-68d1-421f-80d2-bed3a08eec7a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2408.500676] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2408.500922] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 
tempest-ListServerFiltersTestJSON-1331938971-project-member] Copying Virtual Disk [datastore1] vmware_temp/a1845394-68d1-421f-80d2-bed3a08eec7a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/a1845394-68d1-421f-80d2-bed3a08eec7a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2408.501242] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ceb08463-2e99-4855-b068-260d0d1a83b9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.510947] env[61663]: DEBUG oslo_vmware.api [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Waiting for the task: (returnval){ [ 2408.510947] env[61663]: value = "task-1690860" [ 2408.510947] env[61663]: _type = "Task" [ 2408.510947] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2408.518981] env[61663]: DEBUG oslo_vmware.api [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Task: {'id': task-1690860, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2409.020933] env[61663]: DEBUG oslo_vmware.exceptions [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2409.021202] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2409.021788] env[61663]: ERROR nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2409.021788] env[61663]: Faults: ['InvalidArgument'] [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Traceback (most recent call last): [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] yield resources [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] self.driver.spawn(context, instance, image_meta, [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] self._fetch_image_if_missing(context, vi) [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] image_cache(vi, tmp_image_ds_loc) [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] vm_util.copy_virtual_disk( [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] session._wait_for_task(vmdk_copy_task) [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] return self.wait_for_task(task_ref) [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] return evt.wait() [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] result = hub.switch() [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] return self.greenlet.switch() [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] self.f(*self.args, **self.kw) [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] raise exceptions.translate_fault(task_info.error) [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Faults: ['InvalidArgument'] [ 2409.021788] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] [ 2409.022845] env[61663]: INFO nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Terminating instance [ 2409.023683] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2409.023894] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2409.024133] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4753a661-2899-47e9-854b-de858e090656 
{{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.026212] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2409.026413] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2409.027120] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb53d74-189f-4f60-8dc8-36f84cb0c1b3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.033785] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2409.034018] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da8d1d4e-9853-4b81-9f8d-29bb259ea73f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.036058] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2409.036235] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2409.037155] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03cc3256-e896-402a-90a0-bf0d13a094d4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.041596] env[61663]: DEBUG oslo_vmware.api [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Waiting for the task: (returnval){ [ 2409.041596] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c1f03e-6067-d78c-061d-e0121dbb0b32" [ 2409.041596] env[61663]: _type = "Task" [ 2409.041596] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2409.048597] env[61663]: DEBUG oslo_vmware.api [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c1f03e-6067-d78c-061d-e0121dbb0b32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2409.554954] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2409.555284] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Creating directory with path [datastore1] vmware_temp/c0038c6f-672c-42b1-8830-785e1f3a3724/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2409.555875] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce37c51d-cb62-43d1-a8e3-8a8c8f406e6a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.575638] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Created directory with path [datastore1] vmware_temp/c0038c6f-672c-42b1-8830-785e1f3a3724/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2409.575932] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Fetch image to [datastore1] vmware_temp/c0038c6f-672c-42b1-8830-785e1f3a3724/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2409.576129] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/c0038c6f-672c-42b1-8830-785e1f3a3724/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2409.576882] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab258daa-52f7-4c0c-b29c-0631fcb362a7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.583955] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7af6253-e03a-41d6-b6b3-6d0c51a9bae0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.593183] env[61663]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da012551-647c-4676-b758-7bc127c2c55f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.622838] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2fd523-9d4a-4f9b-ab69-fe412b47b4bb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.628446] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e2f47981-c812-46f1-9edc-3ec08c78466e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.649899] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2409.773894] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2409.774722] env[61663]: ERROR nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. 
[ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] result = getattr(controller, method)(*args, **kwargs) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._get(image_id) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] resp, body = self.http_client.get(url, headers=header) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.request(url, 'GET', **kwargs) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._handle_response(resp) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise exc.from_response(resp, resp.content) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] During handling of the above exception, another exception occurred: [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] yield resources [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self.driver.spawn(context, instance, image_meta, [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._fetch_image_if_missing(context, vi) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] image_fetch(context, vi, tmp_image_ds_loc) [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] images.fetch_image( [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2409.774722] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] metadata = IMAGE_API.get(context, image_ref) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return session.show(context, image_id, [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] _reraise_translated_image_exception(image_id) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise new_exc.with_traceback(exc_trace) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] result = getattr(controller, method)(*args, **kwargs) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._get(image_id) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] resp, body = self.http_client.get(url, headers=header) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.request(url, 'GET', **kwargs) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._handle_response(resp) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise exc.from_response(resp, resp.content) [ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. 
[ 2409.775858] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2409.775858] env[61663]: INFO nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Terminating instance [ 2409.776686] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2409.776799] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2409.777428] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2409.777616] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2409.777859] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43eade11-398d-468d-9fb3-231a63410749 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.780616] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3662848a-5abe-4977-8ea0-244f673e4e7a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.787719] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2409.787960] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82f1c96b-3b7a-4b79-b739-0fc1171b8f1f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.790259] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2409.790436] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Folder [datastore1] devstack-image-cache_base 
created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2409.791388] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccc10ef8-9adf-46f9-83bd-1583af648953 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.796207] env[61663]: DEBUG oslo_vmware.api [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Waiting for the task: (returnval){ [ 2409.796207] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d352a1-4c98-b61a-c722-7adf047cae94" [ 2409.796207] env[61663]: _type = "Task" [ 2409.796207] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2409.803231] env[61663]: DEBUG oslo_vmware.api [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d352a1-4c98-b61a-c722-7adf047cae94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.306681] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2410.306954] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Creating directory with path [datastore1] vmware_temp/f99a6fd0-a99b-4545-bcac-8e34b3a1893a/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2410.307207] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1338eb84-5428-490c-82cc-acf4e82f03e4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.334240] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Created directory with path [datastore1] vmware_temp/f99a6fd0-a99b-4545-bcac-8e34b3a1893a/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2410.334438] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Fetch image to [datastore1] vmware_temp/f99a6fd0-a99b-4545-bcac-8e34b3a1893a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2410.334611] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] 
vmware_temp/f99a6fd0-a99b-4545-bcac-8e34b3a1893a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2410.335342] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebf9a56-6d54-443a-b6ef-301e789313b8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.342196] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7872fbc3-915d-49b8-8173-d68b65ed1fab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.351892] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbc3b84-a6ee-4a10-b994-1a30be340db7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.380985] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1a9270-5cc8-47a9-98eb-671012bc16fd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.386507] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d23b3ad9-43e4-4a25-9b12-14826f9fdc6b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.406772] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2410.441789] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2410.442034] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2410.442218] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Deleting the datastore file [datastore1] 668c457f-7ebc-441f-8ece-cc63c571363b {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2410.442475] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-30dce3d2-1b9b-4fc2-a7e5-085ca4fbc1ff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.449232] env[61663]: DEBUG oslo_vmware.api [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 
tempest-ListServerFiltersTestJSON-1331938971-project-member] Waiting for the task: (returnval){ [ 2410.449232] env[61663]: value = "task-1690863" [ 2410.449232] env[61663]: _type = "Task" [ 2410.449232] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.456591] env[61663]: DEBUG oslo_vmware.api [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Task: {'id': task-1690863, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.464094] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2410.464413] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2410.464649] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Deleting the datastore file [datastore1] ae347f45-f39e-47eb-9e37-80ddfc502c27 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2410.464954] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-968e3149-83d5-46c5-929a-ecd66c55ed31 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.471666] env[61663]: DEBUG oslo_vmware.api [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Waiting for the task: (returnval){ [ 2410.471666] env[61663]: value = "task-1690864" [ 2410.471666] env[61663]: _type = "Task" [ 2410.471666] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.473395] env[61663]: DEBUG oslo_vmware.rw_handles [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f99a6fd0-a99b-4545-bcac-8e34b3a1893a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2410.538926] env[61663]: DEBUG oslo_vmware.api [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Task: {'id': task-1690864, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.539714] env[61663]: DEBUG oslo_vmware.rw_handles [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2410.539882] env[61663]: DEBUG oslo_vmware.rw_handles [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f99a6fd0-a99b-4545-bcac-8e34b3a1893a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2410.960218] env[61663]: DEBUG oslo_vmware.api [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Task: {'id': task-1690863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083855} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2410.960916] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2410.961240] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2410.961545] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2410.961851] env[61663]: INFO nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Took 1.94 seconds to destroy the instance on the hypervisor. 
[ 2410.964851] env[61663]: DEBUG nova.compute.claims [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2410.965170] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2410.965505] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2410.983630] env[61663]: DEBUG oslo_vmware.api [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Task: {'id': task-1690864, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091427} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2410.984127] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2410.984531] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2410.984844] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2410.985146] env[61663]: INFO nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 2410.987293] env[61663]: DEBUG nova.compute.claims [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2410.987603] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.194503] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01f8e6a-0c07-4a02-a715-15f40014ae15 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.202053] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcad5e5-f8c4-461e-9aed-3815dcad9e55 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.232462] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4c0f99-4801-4078-9e53-4bb954b8bd5f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.239552] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1dfffb-481d-4457-9015-cad9af9161b3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.252245] env[61663]: DEBUG nova.compute.provider_tree [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2411.261132] env[61663]: DEBUG nova.scheduler.client.report [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2411.276284] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.311s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.276780] env[61663]: ERROR nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 
tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2411.276780] env[61663]: Faults: ['InvalidArgument'] [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Traceback (most recent call last): [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] self.driver.spawn(context, instance, image_meta, [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] self._fetch_image_if_missing(context, vi) [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] image_cache(vi, tmp_image_ds_loc) [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] vm_util.copy_virtual_disk( [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] session._wait_for_task(vmdk_copy_task) [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] return self.wait_for_task(task_ref) [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] return evt.wait() [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] result = hub.switch() [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] return self.greenlet.switch() [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] self.f(*self.args, **self.kw) [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] raise exceptions.translate_fault(task_info.error) [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Faults: ['InvalidArgument'] [ 2411.276780] env[61663]: ERROR nova.compute.manager [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] [ 2411.277978] env[61663]: DEBUG nova.compute.utils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2411.278482] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.291s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.281360] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Build of instance 668c457f-7ebc-441f-8ece-cc63c571363b was re-scheduled: A specified parameter was not correct: fileType [ 2411.281360] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2411.281746] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2411.281931] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2411.282125] env[61663]: DEBUG nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2411.282294] env[61663]: DEBUG nova.network.neutron [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2411.496061] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f3b07f-5b4f-4558-85f5-b03482549424 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.503772] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa0c909-b365-4271-9746-cff86bcd9ec9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.533893] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91b49ec-2445-41ee-9bea-8e1db1df6ac0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.540961] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d90d81-a9b2-4fc5-8200-86bc0a691fda {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.553694] env[61663]: DEBUG nova.compute.provider_tree [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2411.563320] env[61663]: DEBUG nova.scheduler.client.report [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2411.576372] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.577084] env[61663]: ERROR nova.compute.manager [None 
req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] result = getattr(controller, method)(*args, **kwargs) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._get(image_id) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] resp, body = self.http_client.get(url, headers=header) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.request(url, 'GET', **kwargs) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._handle_response(resp) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise exc.from_response(resp, resp.content) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. 
Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] During handling of the above exception, another exception occurred: [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self.driver.spawn(context, instance, image_meta, [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._fetch_image_if_missing(context, vi) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] image_fetch(context, vi, tmp_image_ds_loc) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] images.fetch_image( [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] metadata = IMAGE_API.get(context, image_ref) [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2411.577084] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return session.show(context, image_id, [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] _reraise_translated_image_exception(image_id) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: 
ae347f45-f39e-47eb-9e37-80ddfc502c27] raise new_exc.with_traceback(exc_trace) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] result = getattr(controller, method)(*args, **kwargs) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._get(image_id) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] resp, body = self.http_client.get(url, headers=header) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.request(url, 'GET', **kwargs) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._handle_response(resp) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise exc.from_response(resp, resp.content) [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. [ 2411.578037] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.578037] env[61663]: DEBUG nova.compute.utils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. 
{{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2411.579274] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Build of instance ae347f45-f39e-47eb-9e37-80ddfc502c27 was re-scheduled: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2411.579728] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2411.579899] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2411.580097] env[61663]: DEBUG nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2411.580308] env[61663]: DEBUG nova.network.neutron [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2411.706656] env[61663]: DEBUG nova.network.neutron [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2411.718376] env[61663]: INFO nova.compute.manager [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Took 0.43 seconds to deallocate network for instance. [ 2411.722121] env[61663]: DEBUG neutronclient.v2_0.client [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61663) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2411.722121] env[61663]: ERROR nova.compute.manager [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized.
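The sequence above ends with neutronclient's 401 surfacing as nova.exception.Unauthorized: Nova wraps every neutronclient call in an exception-translating wrapper (the `wrapper` frames at nova/network/neutron.py:196 in the tracebacks below), so callers only ever see Nova-level exception types. A minimal sketch of that wrapper pattern, with stand-in exception classes rather than the real nova/neutronclient ones:

    # Sketch of the exception-translating wrapper that
    # nova/network/neutron.py applies around neutronclient calls
    # (simplified; the real wrapper also detects failed admin tokens and
    # raises NeutronAdminCredentialConfigurationInvalid, seen later in
    # this log).
    import functools


    class ClientUnauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""


    class NovaUnauthorized(Exception):
        """Stand-in for nova.exception.Unauthorized."""


    def translate_neutron_exceptions(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except ClientUnauthorized:
                # Re-raise as a Nova-level type; the original exception
                # stays attached as __context__, which is what produces
                # the "During handling of the above exception, another
                # exception occurred" blocks throughout this log.
                raise NovaUnauthorized("Not authorized.")
        return wrapper


    @translate_neutron_exceptions
    def list_ports(**search_opts):
        raise ClientUnauthorized(
            '401-{"error": {"code": 401, "title": "Unauthorized"}}')

Calling list_ports() here raises NovaUnauthorized with the client-level error chained behind it, matching the "Failed to deallocate networks" record above.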
[ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] result = getattr(controller, method)(*args, **kwargs) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._get(image_id) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] resp, body = self.http_client.get(url, headers=header) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.request(url, 'GET', **kwargs) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._handle_response(resp) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise exc.from_response(resp, resp.content) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
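The innermost frames above end with `raise exc.from_response(resp, resp.content)`: glanceclient picks an exception class from the HTTP status of the response and instantiates it with the response body. A sketch of that status-registry factory pattern (class names and registry here are illustrative, not glanceclient's actual module contents):

    # Status-code -> exception-class factory in the style of
    # glanceclient's exc.from_response (names are illustrative).
    class HTTPException(Exception):
        code = 500


    class HTTPUnauthorized(HTTPException):
        code = 401


    class HTTPNotFound(HTTPException):
        code = 404


    _EXC_BY_CODE = {cls.code: cls
                    for cls in (HTTPUnauthorized, HTTPNotFound)}


    def from_response(status_code, body=""):
        """Build the exception matching an HTTP error response."""
        cls = _EXC_BY_CODE.get(status_code, HTTPException)
        return cls("HTTP %d %s" % (status_code, body))


    # raise from_response(401, "Unauthorized: This server could not
    #     verify that you are authorized to access the document you
    #     requested.")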
[ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] During handling of the above exception, another exception occurred: [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self.driver.spawn(context, instance, image_meta, [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._fetch_image_if_missing(context, vi) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] image_fetch(context, vi, tmp_image_ds_loc) [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] images.fetch_image( [ 2411.722121] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] metadata = IMAGE_API.get(context, image_ref) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return session.show(context, image_id, [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] _reraise_translated_image_exception(image_id) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise new_exc.with_traceback(exc_trace) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: 
ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] result = getattr(controller, method)(*args, **kwargs) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._get(image_id) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] resp, body = self.http_client.get(url, headers=header) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.request(url, 'GET', **kwargs) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self._handle_response(resp) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise exc.from_response(resp, resp.content) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] nova.exception.ImageNotAuthorized: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. 
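The glance.py:287 and glance.py:1032 frames show how Nova converts glanceclient's HTTPUnauthorized into its own ImageNotAuthorized without losing the original stack: `_reraise_translated_image_exception` captures `sys.exc_info()` and re-raises the translated exception via `raise new_exc.with_traceback(exc_trace)`, exactly as printed above. A minimal reproduction of the pattern with stand-in exception classes:

    # Minimal reproduction of the traceback-preserving translation at
    # glance.py:1032 above (stand-in exception classes).
    import sys


    class HTTPUnauthorized(Exception):
        """Stand-in for glanceclient.exc.HTTPUnauthorized."""


    class ImageNotAuthorized(Exception):
        """Stand-in for nova.exception.ImageNotAuthorized."""


    def show(image_id):
        try:
            raise HTTPUnauthorized("HTTP 401 Unauthorized")
        except Exception:
            exc_type, exc_value, exc_trace = sys.exc_info()
            new_exc = ImageNotAuthorized(
                "Not authorized for image %s." % image_id)
            # Swap the exception type but keep the original traceback,
            # so the failing HTTP frames remain visible to debuggers.
            raise new_exc.with_traceback(exc_trace)

Callers of show() catch a Nova-level exception, yet the printed traceback still walks through the glanceclient frames, which is what the log above shows.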
[ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] During handling of the above exception, another exception occurred: [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._build_and_run_instance(context, instance, image, [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise exception.RescheduledException( [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] nova.exception.RescheduledException: Build of instance ae347f45-f39e-47eb-9e37-80ddfc502c27 was re-scheduled: Not authorized for image 362c8152-fcd0-4f43-acbf-09a2dc376cb2. [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] During handling of the above exception, another exception occurred: [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2411.723249] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] exception_handler_v20(status_code, error_body) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise client_exc(message=error_message, [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Neutron server returns request_ids: ['req-4239faa6-96fa-4e89-8ddd-5fbbdb5494ce'] [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: 
ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] During handling of the above exception, another exception occurred: [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._deallocate_network(context, instance, requested_networks) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self.network_api.deallocate_for_instance( [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] data = neutron.list_ports(**search_opts) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.list('ports', self.ports_path, retrieve_all, [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] for r in self._pagination(collection, path, **params): [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] res = self.get(path, params=params) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: 
ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.retry_request("GET", action, body=body, [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.do_request(method, action, body=body, [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._handle_fault_response(status_code, replybody, resp) [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 2411.724394] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise exception.Unauthorized() [ 2411.725533] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] nova.exception.Unauthorized: Not authorized. 
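Each "During handling of the above exception, another exception occurred" separator above is Python's implicit exception chaining: an exception raised inside an `except` block keeps the in-flight one as `__context__`, and the interpreter prints the whole chain. That is why a single failed build yields the ImageNotAuthorized -> RescheduledException -> Unauthorized cascade. A compact, runnable demo:

    # Demo of the implicit exception chaining (__context__) behind the
    # "During handling of the above exception..." separators in this log.
    def build():
        raise RuntimeError("Not authorized for image")   # original failure


    def cleanup():
        raise ValueError("Not authorized.")              # cleanup fails too


    try:
        try:
            build()
        except RuntimeError:
            cleanup()   # raised while handling build()'s error -> chained
    except ValueError as exc:
        # The first failure rides along automatically as __context__.
        assert isinstance(exc.__context__, RuntimeError)
        # `raise ... from err` would set __cause__ instead, printed as
        # "The above exception was the direct cause of the following
        # exception".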
[ 2411.725533] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2411.792522] env[61663]: INFO nova.scheduler.client.report [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Deleted allocations for instance ae347f45-f39e-47eb-9e37-80ddfc502c27 [ 2411.827075] env[61663]: DEBUG oslo_concurrency.lockutils [None req-152297fa-df41-40ac-bf46-f7273100c1d5 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 547.017s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.828109] env[61663]: INFO nova.scheduler.client.report [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Deleted allocations for instance 668c457f-7ebc-441f-8ece-cc63c571363b [ 2411.835024] env[61663]: DEBUG oslo_concurrency.lockutils [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 350.690s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.835024] env[61663]: DEBUG oslo_concurrency.lockutils [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Acquiring lock "ae347f45-f39e-47eb-9e37-80ddfc502c27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.835024] env[61663]: DEBUG oslo_concurrency.lockutils [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.835024] env[61663]: DEBUG oslo_concurrency.lockutils [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.837805] env[61663]: INFO nova.compute.manager [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Terminating instance [ 2411.840605] env[61663]: DEBUG nova.compute.manager [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2411.840605] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2411.840605] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e768960-3123-449d-9057-72894a233f13 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.845860] env[61663]: DEBUG nova.compute.manager [None req-aa308cc7-b9fc-4d6e-b0e7-a13bf9b398c1 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: cbd92feb-f2a1-41cf-8552-8cd0b0b20f0a] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2411.853467] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594f4826-429b-4360-a445-9467d95f3263 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.864704] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e14fcea-4a44-4fa9-929e-f1363b61f837 tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "668c457f-7ebc-441f-8ece-cc63c571363b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 634.003s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.866227] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "668c457f-7ebc-441f-8ece-cc63c571363b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.913s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.866444] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Acquiring lock "668c457f-7ebc-441f-8ece-cc63c571363b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.866641] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "668c457f-7ebc-441f-8ece-cc63c571363b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.866805] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "668c457f-7ebc-441f-8ece-cc63c571363b-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.869105] env[61663]: INFO nova.compute.manager [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Terminating instance [ 2411.872572] env[61663]: DEBUG nova.compute.manager [None req-aa308cc7-b9fc-4d6e-b0e7-a13bf9b398c1 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: cbd92feb-f2a1-41cf-8552-8cd0b0b20f0a] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 2411.885079] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ae347f45-f39e-47eb-9e37-80ddfc502c27 could not be found. [ 2411.885313] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2411.885495] env[61663]: INFO nova.compute.manager [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2411.885731] env[61663]: DEBUG oslo.service.loopingcall [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2411.886325] env[61663]: DEBUG nova.compute.manager [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2411.886512] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2411.888846] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Starting instance... 
{{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2411.891187] env[61663]: DEBUG nova.compute.manager [-] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2411.891292] env[61663]: DEBUG nova.network.neutron [-] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2411.892829] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7af39499-9824-45d3-9ac5-6cb8b21390cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.901702] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917141b3-2b36-4998-870f-9e7804a1d8c2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.913779] env[61663]: DEBUG oslo_concurrency.lockutils [None req-aa308cc7-b9fc-4d6e-b0e7-a13bf9b398c1 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "cbd92feb-f2a1-41cf-8552-8cd0b0b20f0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 236.758s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2411.932719] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 668c457f-7ebc-441f-8ece-cc63c571363b could not be found. [ 2411.932933] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2411.933125] env[61663]: INFO nova.compute.manager [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2411.933366] env[61663]: DEBUG oslo.service.loopingcall [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2411.936354] env[61663]: DEBUG nova.compute.manager [-] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2411.936354] env[61663]: DEBUG nova.network.neutron [-] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2411.936893] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2411.956959] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2411.957244] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2411.960219] env[61663]: INFO nova.compute.claims [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2411.997978] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2412.035941] env[61663]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61663) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2412.036274] env[61663]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
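The failing "Dynamic interval looping call" named `oslo_service.loopingcall.RetryDecorator.__call__.._func` is Nova's `_deallocate_network_with_retries`, which wraps `_deallocate_network` in oslo.service's RetryDecorator; only whitelisted exception types are retried, so the NeutronAdminCredentialConfigurationInvalid above escapes on the first attempt. A minimal stand-in for that retry pattern (not oslo.service's actual implementation, which sleeps via a dynamic looping call):

    # Minimal stand-in for the RetryDecorator pattern used by
    # _deallocate_network_with_retries (illustrative, simplified).
    import functools
    import time


    def retry_on(exceptions, max_retry_count=3, sleep_time=1.0):
        """Retry the wrapped function only for the listed exceptions."""
        def decorator(func):
            @functools.wraps(func)
            def _func(*args, **kwargs):
                attempt = 0
                while True:
                    try:
                        return func(*args, **kwargs)
                    except exceptions:
                        # Retryable failure: back off and try again.
                        attempt += 1
                        if attempt > max_retry_count:
                            raise
                        time.sleep(sleep_time)
                    # Anything not listed -- like the credential error
                    # above -- propagates out on the first attempt.
            return _func
        return decorator


    @retry_on((ConnectionError,), max_retry_count=3)
    def deallocate_network(instance_uuid):
        ...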
[ 2412.036778] env[61663]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-79625ed3-3f5e-449e-bd99-3f9dd60fe51b'] [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2412.036778] env[61663]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2412.036778] env[61663]: ERROR oslo.service.loopingcall [ 2412.038290] env[61663]: ERROR nova.compute.manager [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2412.047669] env[61663]: DEBUG nova.network.neutron [-] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2412.059530] env[61663]: INFO nova.compute.manager [-] [instance: 668c457f-7ebc-441f-8ece-cc63c571363b] Took 0.12 seconds to deallocate network for instance. 
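The lockutils records threaded through this section ("acquired ... waited 350.690s", '"released" ... held 547.017s') come from oslo.concurrency's instrumented lock helper, which logs how long each caller waited for a named lock and how long it held it. A hypothetical re-creation of that accounting around a plain threading.Lock (illustrative helper, not oslo_concurrency.lockutils itself):

    # Illustrative re-creation of the waited/held lock timing that
    # oslo_concurrency.lockutils logs (hypothetical helper).
    import contextlib
    import logging
    import threading
    import time

    LOG = logging.getLogger(__name__)
    _LOCKS = {}   # name -> threading.Lock (simplified registry)


    @contextlib.contextmanager
    def timed_lock(name, owner):
        lock = _LOCKS.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, owner, acquired - start)
        try:
            yield
        finally:
            lock.release()
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, owner, time.monotonic() - acquired)


    # with timed_lock("compute_resources",
    #                 "ResourceTracker.instance_claim"):
    #     ...  # claim resources while holding the lock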
[ 2412.074657] env[61663]: ERROR nova.compute.manager [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] exception_handler_v20(status_code, error_body) [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise client_exc(message=error_message, [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Neutron server returns request_ids: ['req-79625ed3-3f5e-449e-bd99-3f9dd60fe51b'] [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] During handling of the above exception, another exception occurred: [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Traceback (most recent call last): [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._delete_instance(context, instance, bdms) [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._shutdown_instance(context, instance, bdms) [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._try_deallocate_network(context, 
instance, requested_networks) [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] with excutils.save_and_reraise_exception(): [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self.force_reraise() [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise self.value [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] _deallocate_network_with_retries() [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return evt.wait() [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] result = hub.switch() [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.greenlet.switch() [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] result = func(*self.args, **self.kw) [ 2412.074657] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] result = f(*args, **kwargs) [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._deallocate_network( [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 
2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self.network_api.deallocate_for_instance( [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] data = neutron.list_ports(**search_opts) [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.list('ports', self.ports_path, retrieve_all, [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] for r in self._pagination(collection, path, **params): [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] res = self.get(path, params=params) [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.retry_request("GET", action, body=body, [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] return self.do_request(method, action, body=body, [ 2412.075745] env[61663]: ERROR 
nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] ret = obj(*args, **kwargs) [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] self._handle_fault_response(status_code, replybody, resp) [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2412.075745] env[61663]: ERROR nova.compute.manager [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] [ 2412.100072] env[61663]: DEBUG oslo_concurrency.lockutils [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Lock "ae347f45-f39e-47eb-9e37-80ddfc502c27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.266s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2412.158565] env[61663]: INFO nova.compute.manager [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] [instance: ae347f45-f39e-47eb-9e37-80ddfc502c27] Successfully reverted task state from None on failure for instance. [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server [None req-64809948-2809-4b09-a9ed-7c2ea3d83831 tempest-ServersTestMultiNic-1267577218 tempest-ServersTestMultiNic-1267577218-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
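Most frames in the oslo_messaging traceback below are one idiom repeated: `excutils.save_and_reraise_exception()` from oslo.utils lets each layer run its failure handling (hence "Successfully reverted task state" above) and then re-raise the original exception unchanged; its `__exit__` is the `force_reraise()` / `raise self.value` pair printed over and over. Typical shape (requires oslo.utils; the worker and cleanup functions here are illustrative stand-ins):

    # Typical use of oslo_utils.excutils.save_and_reraise_exception, the
    # context manager stacked through the RPC-server traceback below.
    from oslo_utils import excutils   # pip install oslo.utils


    def revert_task_state(instance_uuid):
        print("reverted task state for", instance_uuid)   # stand-in cleanup


    def terminate_instance(instance_uuid):
        try:
            raise RuntimeError("Networking client is experiencing an "
                               "unauthorized exception.")  # simulated failure
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs while the original exception is saved;
                # leaving the block calls force_reraise() -- the
                # "raise self.value" frames repeated in the log.
                revert_task_state(instance_uuid)

terminate_instance("ae347f45-...") prints the revert message and then re-raises the original RuntimeError, mirroring how the compute manager reverts task state before the exception reaches the RPC server.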
[ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-79625ed3-3f5e-449e-bd99-3f9dd60fe51b'] [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 2412.165169] env[61663]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.166689] env[61663]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2412.166689] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2412.168118] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2412.168118] env[61663]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2412.168118] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2412.168118] env[61663]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2412.168118] env[61663]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
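The chained traceback above is the diagnostic heart of this section. neutronclient raises Unauthorized (HTTP 401) in exception_handler_v20, and the decorator that nova/network/neutron.py applies around every client call (the repeated "wrapper" frames at line 196) converts it at line 212 into nova.exception.NeutronAdminCredentialConfigurationInvalid, on the reasoning that a 401 against the admin-scoped service credentials points at nova's [neutron] configuration rather than at the user request. That is also why the failure is logged twice: once by nova.compute.manager for the instance, and again by oslo_messaging.rpc.server when the exception escapes terminate_instance into the RPC dispatcher. A minimal runnable sketch of the translation pattern follows; the classes and function names are simplified stand-ins, not nova's real implementation.

# Sketch of the 401-translation wrapper visible in the traceback above.
# Both exception classes are stand-ins for
# neutronclient.common.exceptions.Unauthorized and
# nova.exception.NeutronAdminCredentialConfigurationInvalid.
import functools

class Unauthorized(Exception):
    """Stand-in for the neutronclient 401 exception."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the nova exception raised at wrapper line 212."""

def translate_unauthorized(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)   # "ret = obj(*args, **kwargs)"
        except Unauthorized:
            # Admin credentials were rejected: raise a configuration
            # error instead. Implicit exception chaining produces the
            # "During handling of the above exception, another
            # exception occurred" sequence seen in the log.
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper

@translate_unauthorized
def list_ports(**search_opts):
    # Simulate neutron rejecting the request, as in the records above.
    raise Unauthorized("401: The request you have made requires authentication.")

try:
    list_ports(device_id="ae347f45-f39e-47eb-9e37-80ddfc502c27")
except NeutronAdminCredentialConfigurationInvalid:
    print("Networking client is experiencing an unauthorized exception.")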
[ 2412.168118] env[61663]: ERROR oslo_messaging.rpc.server [ 2412.168118] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1971f0c1-a57a-4587-949b-4e05e80433ca tempest-ListServerFiltersTestJSON-1331938971 tempest-ListServerFiltersTestJSON-1331938971-project-member] Lock "668c457f-7ebc-441f-8ece-cc63c571363b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.300s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2412.206979] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1c9b73-89b8-4829-88f9-72c0df239771 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.215961] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd736607-90ff-4e16-8d81-28d5bbb1555b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.250542] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b604550-5ce0-4fce-8f16-a63a2b145648 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.259273] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2215b14-04db-44e6-aa08-dedca70b75da {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.273051] env[61663]: DEBUG nova.compute.provider_tree [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2412.281959] env[61663]: DEBUG nova.scheduler.client.report [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2412.294439] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.337s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2412.294898] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Start building networks asynchronously for instance.
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2412.297139] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.299s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2412.298529] env[61663]: INFO nova.compute.claims [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2412.330045] env[61663]: DEBUG nova.compute.utils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2412.331117] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Not allocating networking since 'none' was specified. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 2412.338136] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2412.399114] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2412.426510] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2412.426738] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2412.426897] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2412.427090] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2412.427242] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2412.427392] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2412.427602] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2412.427763] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2412.427964] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 
tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2412.428568] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2412.428776] env[61663]: DEBUG nova.virt.hardware [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2412.429773] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-405cc734-1d86-4fbe-a527-f9d9a2dc01b7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.439903] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d859b834-45c7-4566-8008-478d06190a21 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.456250] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Instance VIF info [] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2412.461759] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Creating folder: Project (7b0051a53cec4d1081425662f48423f1). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2412.464118] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-141052cb-7e37-4cfb-9c81-828c6b9c1651 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.473770] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Created folder: Project (7b0051a53cec4d1081425662f48423f1) in parent group-v352575. [ 2412.473957] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Creating folder: Instances. Parent ref: group-v352680. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2412.474222] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0fcb4587-53d6-4206-ad71-e94a7d93a30c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.484804] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Created folder: Instances in parent group-v352680. 
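The nova.virt.hardware records above walk the standard topology search: given the m1.nano flavor's single vCPU and the effectively unbounded limits (65536 sockets, cores, and threads), nova enumerates every (sockets, cores, threads) factorization of the vCPU count, finds exactly one candidate, and sorts it against the (absent) preferences, yielding VirtCPUTopology(cores=1,sockets=1,threads=1). The enumeration step can be illustrated as below; this is a rough reconstruction for intuition, not nova's actual _get_possible_cpu_topologies code, and it uses small bounds in place of 65536.

# Illustrative enumeration of CPU topologies for a vCPU count.
from itertools import product

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Keep every (sockets, cores, threads) triple whose product equals
    # the requested vCPU count and which fits inside the limits.
    return [
        (s, c, t)
        for s, c, t in product(range(1, max_sockets + 1),
                               range(1, max_cores + 1),
                               range(1, max_threads + 1))
        if s * c * t == vcpus
    ]

# One vCPU factors only as 1x1x1, matching "Got 1 possible topologies".
print(possible_topologies(1, 16, 16, 16))   # [(1, 1, 1)]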
[ 2412.485045] env[61663]: DEBUG oslo.service.loopingcall [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2412.485233] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2412.485428] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8bdb3e7b-1871-42dd-8eaf-3c133864ce57 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.503588] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2412.503588] env[61663]: value = "task-1690867" [ 2412.503588] env[61663]: _type = "Task" [ 2412.503588] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2412.511337] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690867, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2412.545221] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5436337e-007a-4637-ab0b-fd143bc59180 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.552617] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4967681c-0024-4049-b8ae-b5b6b6c43fc3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.584249] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba636ed-0303-4cbc-925a-0291b6b11737 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.591616] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38913d6-8a70-4a20-b452-a05c3134c4f3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.604860] env[61663]: DEBUG nova.compute.provider_tree [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2412.614219] env[61663]: DEBUG nova.scheduler.client.report [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2412.627733] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.331s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2412.628246] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2412.662790] env[61663]: DEBUG nova.compute.utils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2412.664348] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2412.664757] env[61663]: DEBUG nova.network.neutron [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2412.673370] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2412.737942] env[61663]: DEBUG nova.policy [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3cf955b5e034583bdb45f8b7f5874ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec01f323b53442b6b0869e0b09c4e5f9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2412.741549] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2412.766699] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2412.766944] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2412.767114] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2412.767299] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2412.767444] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2412.767592] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2412.767799] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2412.767961] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2412.768144] env[61663]: DEBUG nova.virt.hardware [None 
req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2412.768312] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2412.768484] env[61663]: DEBUG nova.virt.hardware [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2412.769379] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb6e570-c40b-47c8-8d67-32459dd1103b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2412.777567] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50571134-d3ef-4a82-a95d-222bfc30a5e3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.015813] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690867, 'name': CreateVM_Task, 'duration_secs': 0.335087} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2413.015813] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2413.016128] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2413.017410] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2413.017410] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2413.017410] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22af9b0e-17a8-4719-ad77-49cfd5404cab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2413.021425] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 
tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Waiting for the task: (returnval){ [ 2413.021425] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cf4dcd-1f90-2d1f-47d6-df99cc6d8159" [ 2413.021425] env[61663]: _type = "Task" [ 2413.021425] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2413.028701] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cf4dcd-1f90-2d1f-47d6-df99cc6d8159, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2413.234797] env[61663]: DEBUG nova.network.neutron [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Successfully created port: 89fa18c1-dc1e-456a-8a4e-ac362c12ef05 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2413.531814] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cf4dcd-1f90-2d1f-47d6-df99cc6d8159, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2413.950974] env[61663]: DEBUG nova.compute.manager [req-0ffa770e-a826-46be-be4d-32c5119862d2 req-68621bc3-a02c-4218-a591-cd13161704a6 service nova] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Received event network-vif-plugged-89fa18c1-dc1e-456a-8a4e-ac362c12ef05 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2413.951220] env[61663]: DEBUG oslo_concurrency.lockutils [req-0ffa770e-a826-46be-be4d-32c5119862d2 req-68621bc3-a02c-4218-a591-cd13161704a6 service nova] Acquiring lock "0adee33d-8d0c-4bcf-8df4-11465be00485-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2413.951432] env[61663]: DEBUG oslo_concurrency.lockutils [req-0ffa770e-a826-46be-be4d-32c5119862d2 req-68621bc3-a02c-4218-a591-cd13161704a6 service nova] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2413.951697] env[61663]: DEBUG oslo_concurrency.lockutils [req-0ffa770e-a826-46be-be4d-32c5119862d2 req-68621bc3-a02c-4218-a591-cd13161704a6 service nova] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2413.954710] env[61663]: DEBUG nova.compute.manager [req-0ffa770e-a826-46be-be4d-32c5119862d2 req-68621bc3-a02c-4218-a591-cd13161704a6 service nova] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] No waiting events found dispatching network-vif-plugged-89fa18c1-dc1e-456a-8a4e-ac362c12ef05
{{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2413.954710] env[61663]: WARNING nova.compute.manager [req-0ffa770e-a826-46be-be4d-32c5119862d2 req-68621bc3-a02c-4218-a591-cd13161704a6 service nova] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Received unexpected event network-vif-plugged-89fa18c1-dc1e-456a-8a4e-ac362c12ef05 for instance with vm_state building and task_state spawning. [ 2413.959483] env[61663]: DEBUG nova.network.neutron [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Successfully updated port: 89fa18c1-dc1e-456a-8a4e-ac362c12ef05 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2413.970337] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquiring lock "refresh_cache-0adee33d-8d0c-4bcf-8df4-11465be00485" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2413.970479] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquired lock "refresh_cache-0adee33d-8d0c-4bcf-8df4-11465be00485" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2413.970642] env[61663]: DEBUG nova.network.neutron [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2414.012469] env[61663]: DEBUG nova.network.neutron [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2414.034029] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2414.034281] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2414.034441] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2414.310699] env[61663]: DEBUG nova.network.neutron [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Updating instance_info_cache with network_info: [{"id": "89fa18c1-dc1e-456a-8a4e-ac362c12ef05", "address": "fa:16:3e:d4:63:bb", "network": {"id": "3e05ee55-626f-41a6-82d4-e95ffd784e2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-897997013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec01f323b53442b6b0869e0b09c4e5f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1dc3a0d3-9578-4c45-bc16-13ed5b84f5b4", "external-id": "nsx-vlan-transportzone-817", "segmentation_id": 817, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89fa18c1-dc", "ovs_interfaceid": "89fa18c1-dc1e-456a-8a4e-ac362c12ef05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2414.323991] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Releasing lock "refresh_cache-0adee33d-8d0c-4bcf-8df4-11465be00485" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2414.324299] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Instance network_info: |[{"id": 
"89fa18c1-dc1e-456a-8a4e-ac362c12ef05", "address": "fa:16:3e:d4:63:bb", "network": {"id": "3e05ee55-626f-41a6-82d4-e95ffd784e2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-897997013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec01f323b53442b6b0869e0b09c4e5f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1dc3a0d3-9578-4c45-bc16-13ed5b84f5b4", "external-id": "nsx-vlan-transportzone-817", "segmentation_id": 817, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89fa18c1-dc", "ovs_interfaceid": "89fa18c1-dc1e-456a-8a4e-ac362c12ef05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2414.324701] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:63:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1dc3a0d3-9578-4c45-bc16-13ed5b84f5b4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89fa18c1-dc1e-456a-8a4e-ac362c12ef05', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2414.332070] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Creating folder: Project (ec01f323b53442b6b0869e0b09c4e5f9). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2414.332645] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad5bd1f0-4c01-4c4a-b1e2-c4699a9e6e8c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.343934] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Created folder: Project (ec01f323b53442b6b0869e0b09c4e5f9) in parent group-v352575. [ 2414.344134] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Creating folder: Instances. Parent ref: group-v352683. 
{{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2414.344358] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f300434-81c1-4888-bcac-f86b020ae628 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.352861] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Created folder: Instances in parent group-v352683. [ 2414.353102] env[61663]: DEBUG oslo.service.loopingcall [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2414.353282] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2414.353473] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4b544e7-1ff3-432f-bca9-092f81fc1da6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.372194] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2414.372194] env[61663]: value = "task-1690870" [ 2414.372194] env[61663]: _type = "Task" [ 2414.372194] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.379462] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690870, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2414.881803] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690870, 'name': CreateVM_Task, 'duration_secs': 0.371661} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2414.882045] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2414.882746] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2414.882913] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2414.883280] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2414.883537] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d2bd267-e6b9-423a-9626-633955717467 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2414.888179] env[61663]: DEBUG oslo_vmware.api [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Waiting for the task: (returnval){ [ 2414.888179] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e9e1ab-13b2-62f3-2f0e-0545e3cc0576" [ 2414.888179] env[61663]: _type = "Task" [ 2414.888179] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2414.895975] env[61663]: DEBUG oslo_vmware.api [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e9e1ab-13b2-62f3-2f0e-0545e3cc0576, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2415.398345] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2415.398617] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2415.398818] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2415.977264] env[61663]: DEBUG nova.compute.manager [req-163e0a8c-2437-4160-8049-4e868aff9a94 req-010a0365-4bc5-48d4-b6b5-013df157ca28 service nova] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Received event network-changed-89fa18c1-dc1e-456a-8a4e-ac362c12ef05 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2415.977482] env[61663]: DEBUG nova.compute.manager [req-163e0a8c-2437-4160-8049-4e868aff9a94 req-010a0365-4bc5-48d4-b6b5-013df157ca28 service nova] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Refreshing instance network info cache due to event network-changed-89fa18c1-dc1e-456a-8a4e-ac362c12ef05. 
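The network-changed-89fa18c1-... entries show how Neutron external events arrive: the port UUID is appended to the event name, and the handler splits it back out before refreshing the cached network info for that port. A toy parser under that assumption (not Nova's implementation):

    KNOWN_EVENTS = ('network-changed', 'network-vif-plugged',
                    'network-vif-unplugged')

    def parse_external_event(event):
        # UUID tags contain dashes themselves, so split on known event
        # prefixes instead of the last dash.
        for prefix in KNOWN_EVENTS:
            if event.startswith(prefix + '-'):
                return prefix, event[len(prefix) + 1:]
        return event, None

    print(parse_external_event(
        'network-changed-89fa18c1-dc1e-456a-8a4e-ac362c12ef05'))
    # ('network-changed', '89fa18c1-dc1e-456a-8a4e-ac362c12ef05')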
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2415.977699] env[61663]: DEBUG oslo_concurrency.lockutils [req-163e0a8c-2437-4160-8049-4e868aff9a94 req-010a0365-4bc5-48d4-b6b5-013df157ca28 service nova] Acquiring lock "refresh_cache-0adee33d-8d0c-4bcf-8df4-11465be00485" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2415.977847] env[61663]: DEBUG oslo_concurrency.lockutils [req-163e0a8c-2437-4160-8049-4e868aff9a94 req-010a0365-4bc5-48d4-b6b5-013df157ca28 service nova] Acquired lock "refresh_cache-0adee33d-8d0c-4bcf-8df4-11465be00485" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2415.978022] env[61663]: DEBUG nova.network.neutron [req-163e0a8c-2437-4160-8049-4e868aff9a94 req-010a0365-4bc5-48d4-b6b5-013df157ca28 service nova] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Refreshing network info cache for port 89fa18c1-dc1e-456a-8a4e-ac362c12ef05 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2416.043312] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_power_states {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2416.063771] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Getting list of instances from cluster (obj){ [ 2416.063771] env[61663]: value = "domain-c8" [ 2416.063771] env[61663]: _type = "ClusterComputeResource" [ 2416.063771] env[61663]: } {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2416.067079] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca643abe-4497-44a7-b82d-8248d2187508 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2416.083804] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Got total of 10 instances {{(pid=61663) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2416.083970] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 525749ba-7de2-4ec5-8f7b-1f4c291710fa {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2416.084278] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 740f7887-4a5c-4889-9635-e9d9c6607ee7 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2416.084394] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 54a78c20-cbf6-453b-88e4-2fb4da0a6200 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2416.084485] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2416.084633] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid e47c9821-f815-4bd5-bf00-8822f08e3333 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 
2416.084783] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid c21a5af5-004b-4544-bcf0-f105d6f336c9 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2416.084925] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 202e0f58-b057-4e57-8a92-c06d6efda570 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2416.085083] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 67f8162d-a631-4f0e-b03c-fd76ee131615 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2416.085230] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 029ec7ad-96a1-42e0-a926-c1aab1de05a8 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2416.085375] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Triggering sync for uuid 0adee33d-8d0c-4bcf-8df4-11465be00485 {{(pid=61663) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 2416.085687] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.085925] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "740f7887-4a5c-4889-9635-e9d9c6607ee7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.086147] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.086344] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.086532] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "e47c9821-f815-4bd5-bf00-8822f08e3333" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.086723] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "c21a5af5-004b-4544-bcf0-f105d6f336c9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2416.086915] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "202e0f58-b057-4e57-8a92-c06d6efda570" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.087194] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "67f8162d-a631-4f0e-b03c-fd76ee131615" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.087390] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.087590] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "0adee33d-8d0c-4bcf-8df4-11465be00485" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2416.232041] env[61663]: DEBUG nova.network.neutron [req-163e0a8c-2437-4160-8049-4e868aff9a94 req-010a0365-4bc5-48d4-b6b5-013df157ca28 service nova] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Updated VIF entry in instance network info cache for port 89fa18c1-dc1e-456a-8a4e-ac362c12ef05. 
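Each Acquiring lock "<uuid>" line above corresponds to one instance: the power-state sync takes a lock named after the instance UUID so that a concurrent operation on the same instance (delete, reboot, resize) cannot race the power-state correction. A sketch of that per-UUID serialization with oslo.concurrency, using UUIDs from the log and a placeholder body:

    from oslo_concurrency import lockutils

    def query_driver_power_state_and_sync(instance_uuid):
        # One named lock per instance UUID, matching the lock names above.
        with lockutils.lock(instance_uuid):
            # Placeholder: compare the hypervisor's power state for this
            # instance with the database record and reconcile any drift.
            pass

    for uuid in ('525749ba-7de2-4ec5-8f7b-1f4c291710fa',
                 '0adee33d-8d0c-4bcf-8df4-11465be00485'):
        query_driver_power_state_and_sync(uuid)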
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2416.232413] env[61663]: DEBUG nova.network.neutron [req-163e0a8c-2437-4160-8049-4e868aff9a94 req-010a0365-4bc5-48d4-b6b5-013df157ca28 service nova] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Updating instance_info_cache with network_info: [{"id": "89fa18c1-dc1e-456a-8a4e-ac362c12ef05", "address": "fa:16:3e:d4:63:bb", "network": {"id": "3e05ee55-626f-41a6-82d4-e95ffd784e2d", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-897997013-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ec01f323b53442b6b0869e0b09c4e5f9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1dc3a0d3-9578-4c45-bc16-13ed5b84f5b4", "external-id": "nsx-vlan-transportzone-817", "segmentation_id": 817, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89fa18c1-dc", "ovs_interfaceid": "89fa18c1-dc1e-456a-8a4e-ac362c12ef05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2416.242298] env[61663]: DEBUG oslo_concurrency.lockutils [req-163e0a8c-2437-4160-8049-4e868aff9a94 req-010a0365-4bc5-48d4-b6b5-013df157ca28 service nova] Releasing lock "refresh_cache-0adee33d-8d0c-4bcf-8df4-11465be00485" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2436.841742] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "0266b3f5-ee31-46d7-af5e-844a27bfd829" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2436.842068] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "0266b3f5-ee31-46d7-af5e-844a27bfd829" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2438.736566] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2439.692691] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.688291] env[61663]: DEBUG 
oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.691893] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.692069] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2446.692196] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2446.713914] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.714088] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.714222] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.714348] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.714469] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.714750] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.714931] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.715071] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Skipping network cache update for instance because it is Building. 
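A few entries back, the "Updating instance_info_cache with network_info" line shows the cached VIF structure that the heal task above would refresh; the useful fields nest as network, then subnets, then ips. A short sketch that pulls the fixed addresses out of a trimmed copy of that structure:

    vif = {
        "id": "89fa18c1-dc1e-456a-8a4e-ac362c12ef05",
        "address": "fa:16:3e:d4:63:bb",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.14", "type": "fixed"}],
            }],
        },
        "type": "ovs",
        "devname": "tap89fa18c1-dc",
    }

    def fixed_ips(vif):
        # Walk network -> subnets -> ips and keep the fixed entries.
        return [ip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                if ip.get("type") == "fixed"]

    print(fixed_ips(vif))   # ['192.168.128.14']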
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.715195] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.715314] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2446.715433] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2447.692525] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.692693] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.693136] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2448.704686] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2448.705024] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2448.705328] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2448.705553] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2448.706779] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60152c3c-24d6-4f17-b912-a60a0790b6b8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.715445] env[61663]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5bf544-6e6c-42da-8cbb-d174358b9e03 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.728763] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b88ee61-3c9e-4532-8fe9-aa5b5b254d49 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.734921] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b080b534-3a8b-43a7-a6b4-3d00e0790795 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2448.764420] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181319MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2448.764557] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2448.764730] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2448.840849] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.841032] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.841170] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.841294] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
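The "Running periodic task" entries (_poll_unconfirmed_resizes, _heal_instance_info_cache, update_available_resource, and the rest) are driven by oslo.service's periodic-task machinery: manager methods are decorated with a spacing, and a runner invokes whichever tasks are due. A minimal sketch against the real oslo.service API; the task name, body, and spacing here are illustrative:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=10, run_immediately=True)
        def _poll_rebooting_instances(self, context):
            # Illustrative body; the decorator registers this method so
            # run_periodic_tasks() fires it roughly every 10 seconds.
            print("Running periodic task _poll_rebooting_instances")

    mgr = Manager()
    mgr.run_periodic_tasks(context=None)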
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.841412] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.841527] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.841640] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.841756] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.841875] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.841995] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2448.853536] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2448.863742] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5002feba-1ffc-4957-ad11-712fce784ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2448.873390] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2448.882511] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2448.891297] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2448.891518] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2448.891743] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2449.056587] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018c06f3-c227-4324-babb-bccb573779b1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.064387] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b737ac0-413d-4d31-ac9f-1b4bbd099bfc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.094243] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637081f6-85b0-455b-b571-cff98bc4b6dc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.100899] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51aecb00-2396-4937-a64e-0e0038909e69 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2449.113522] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: 
b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2449.121690] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2449.137884] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2449.138082] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.373s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2451.137455] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2455.692705] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2455.693021] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
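The numbers in the "Final resource view" line above are consistent with the tracked allocations: ten instances at 128 MB, 1 VCPU, and 1 GB of disk each, plus the 512 MB the inventory reserves for the host. A quick arithmetic check, with every figure taken from the log:

    instances = 10            # "total allocated vcpus: 10"
    mem_mb = 128              # 'MEMORY_MB': 128 per allocation
    disk_gb = 1               # 'DISK_GB': 1 per allocation
    reserved_host_mb = 512    # 'reserved': 512 in the MEMORY_MB inventory

    assert instances * mem_mb + reserved_host_mb == 1792   # used_ram=1792MB
    assert instances * disk_gb == 10                       # used_disk=10GB
    assert instances == 10                                 # used_vcpus=10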
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2457.303884] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2458.514076] env[61663]: WARNING oslo_vmware.rw_handles [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2458.514076] env[61663]: ERROR oslo_vmware.rw_handles [ 2458.514076] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/f99a6fd0-a99b-4545-bcac-8e34b3a1893a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2458.516105] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2458.516360] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Copying Virtual Disk [datastore1] vmware_temp/f99a6fd0-a99b-4545-bcac-8e34b3a1893a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/f99a6fd0-a99b-4545-bcac-8e34b3a1893a/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2458.516661] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-936f77a6-cf89-4cbd-b275-6239e9b2ee93 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.524708] env[61663]: DEBUG oslo_vmware.api [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Waiting for the task: (returnval){ [ 2458.524708] env[61663]: value = "task-1690871" [ 2458.524708] env[61663]: _type = "Task" [ 2458.524708] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2458.533068] env[61663]: DEBUG oslo_vmware.api [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Task: {'id': task-1690871, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2459.035067] env[61663]: DEBUG oslo_vmware.exceptions [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2459.035351] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2459.035904] env[61663]: ERROR nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2459.035904] env[61663]: Faults: ['InvalidArgument'] [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Traceback (most recent call last): [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] yield resources [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] self.driver.spawn(context, instance, image_meta, [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] 
self._fetch_image_if_missing(context, vi) [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] image_cache(vi, tmp_image_ds_loc) [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] vm_util.copy_virtual_disk( [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] session._wait_for_task(vmdk_copy_task) [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] return self.wait_for_task(task_ref) [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] return evt.wait() [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] result = hub.switch() [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] return self.greenlet.switch() [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] self.f(*self.args, **self.kw) [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] raise exceptions.translate_fault(task_info.error) [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Faults: ['InvalidArgument'] [ 2459.035904] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] [ 2459.036866] env[61663]: INFO nova.compute.manager [None 
req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Terminating instance [ 2459.037805] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2459.037979] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2459.038231] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1105a92-632b-4f4f-848b-5ac0e46a89d9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.040627] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2459.040842] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2459.041576] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2838b63-78a7-472c-8a84-2e2a56c4b90d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.047980] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2459.048202] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5bce6b8-f1de-41b0-86a8-885fd33a8c73 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.050261] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2459.050435] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Folder [datastore1] devstack-image-cache_base created. 
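The "Fault InvalidArgument not matched" debug line earlier, followed by the VimFaultException traceback, shows oslo.vmware's fault translation: the fault name from the failed task is looked up against known exception classes, and anything unmatched is wrapped in a generic exception carrying the fault list. A simplified sketch of that dispatch; the registry is illustrative and the class here is a stripped-down stand-in for the real VimFaultException:

    class VimFaultException(Exception):
        # Simplified: the real oslo.vmware class carries more context.
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    # Illustrative registry mapping vSphere fault names to dedicated
    # exception classes; anything missing falls through to the generic
    # wrapper, hence "Fault InvalidArgument not matched".
    FAULT_CLASSES = {}

    def translate_fault(fault_name, message):
        cls = FAULT_CLASSES.get(fault_name)
        if cls is not None:
            return cls(message)
        return VimFaultException([fault_name], message)

    err = translate_fault('InvalidArgument',
                          'A specified parameter was not correct: fileType')
    print(err.fault_list)   # ['InvalidArgument']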
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2459.051382] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f9bbd67-64a6-4499-97a6-797ef30a0968 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.056126] env[61663]: DEBUG oslo_vmware.api [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for the task: (returnval){ [ 2459.056126] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5206bbd3-b1fc-05a9-fa51-f0b4604fd35b" [ 2459.056126] env[61663]: _type = "Task" [ 2459.056126] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2459.062869] env[61663]: DEBUG oslo_vmware.api [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5206bbd3-b1fc-05a9-fa51-f0b4604fd35b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2459.118698] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2459.118901] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2459.119096] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Deleting the datastore file [datastore1] 525749ba-7de2-4ec5-8f7b-1f4c291710fa {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2459.119367] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54dd9d0c-d6db-481f-9534-f1f9b10a94b4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.126346] env[61663]: DEBUG oslo_vmware.api [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Waiting for the task: (returnval){ [ 2459.126346] env[61663]: value = "task-1690873" [ 2459.126346] env[61663]: _type = "Task" [ 2459.126346] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2459.133714] env[61663]: DEBUG oslo_vmware.api [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Task: {'id': task-1690873, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2459.568265] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2459.568603] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Creating directory with path [datastore1] vmware_temp/f6454763-87f4-493d-b4eb-eea41ffc9413/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2459.568816] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-036b76df-8a11-4aa1-b862-3ca8ae58e2b5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.580032] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Created directory with path [datastore1] vmware_temp/f6454763-87f4-493d-b4eb-eea41ffc9413/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2459.580272] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Fetch image to [datastore1] vmware_temp/f6454763-87f4-493d-b4eb-eea41ffc9413/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2459.580406] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/f6454763-87f4-493d-b4eb-eea41ffc9413/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2459.581180] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81979bcd-c18b-44d3-b6cf-eba3275e054f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.587783] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55360902-622d-4950-bab2-d4e665ffa357 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.596667] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3512ad14-b5c3-40dd-830f-41c20c7f2973 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.626480] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438c1f5f-dc78-4b75-ae5b-c3c2fef13f41 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.637614] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-29ff0745-e5bf-4995-b9d9-10f26be52c6a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.639290] env[61663]: DEBUG oslo_vmware.api [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Task: {'id': task-1690873, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074413} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2459.639523] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2459.639704] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2459.639876] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2459.640060] env[61663]: INFO nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Took 0.60 seconds to destroy the instance on the hypervisor. 
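The "Took 0.60 seconds" figure above lines up with the surrounding timestamps: destruction started at 2459.040627 ("Start destroying the instance on the hypervisor.") and the INFO line landed at 2459.640060.

    start = 2459.040627   # "Start destroying the instance on the hypervisor."
    done = 2459.640060    # "Took 0.60 seconds to destroy the instance ..."
    print(round(done - start, 2))   # 0.6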
[ 2459.642193] env[61663]: DEBUG nova.compute.claims [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2459.642399] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2459.642620] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2459.660333] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2459.710050] env[61663]: DEBUG oslo_vmware.rw_handles [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f6454763-87f4-493d-b4eb-eea41ffc9413/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2459.774701] env[61663]: DEBUG oslo_vmware.rw_handles [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2459.774912] env[61663]: DEBUG oslo_vmware.rw_handles [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f6454763-87f4-493d-b4eb-eea41ffc9413/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2459.898907] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74eb0e29-b692-472a-8084-575392c81be8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.906700] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6cbdcee-5d88-46d2-88a4-39a849020a6a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.936879] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de37a3fc-fb79-4baa-ba31-745c85c492c5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.943591] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363c39c4-ccfb-4ecb-9582-08df059b0d9e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2459.955946] env[61663]: DEBUG nova.compute.provider_tree [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2459.964308] env[61663]: DEBUG nova.scheduler.client.report [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2459.979675] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.337s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2459.980197] env[61663]: ERROR nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2459.980197] env[61663]: Faults: ['InvalidArgument'] [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Traceback (most recent call last): [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] 
self.driver.spawn(context, instance, image_meta, [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] self._fetch_image_if_missing(context, vi) [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] image_cache(vi, tmp_image_ds_loc) [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] vm_util.copy_virtual_disk( [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] session._wait_for_task(vmdk_copy_task) [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] return self.wait_for_task(task_ref) [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] return evt.wait() [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] result = hub.switch() [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] return self.greenlet.switch() [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] self.f(*self.args, **self.kw) [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] raise exceptions.translate_fault(task_info.error) [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Faults: ['InvalidArgument'] [ 2459.980197] env[61663]: ERROR nova.compute.manager [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] [ 2459.981252] env[61663]: DEBUG nova.compute.utils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2459.982366] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Build of instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa was re-scheduled: A specified parameter was not correct: fileType [ 2459.982366] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2459.982723] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2459.982896] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2459.983091] env[61663]: DEBUG nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2459.983263] env[61663]: DEBUG nova.network.neutron [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2460.270586] env[61663]: DEBUG nova.network.neutron [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2460.281099] env[61663]: INFO nova.compute.manager [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Took 0.30 seconds to deallocate network for instance. 
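The traceback above ends in "raise exceptions.translate_fault(task_info.error)": oslo.vmware keeps a registry mapping VMware fault names (here 'InvalidArgument') to specific exception classes, and falls back to the generic VimFaultException when no class matches — exactly what the "Fault InvalidArgument not matched" DEBUG line further down in this log records. A rough sketch of that lookup, with a made-up _FAULT_CLASSES table standing in for the real registry:

    class VimFaultException(Exception):
        """Generic wrapper carrying the raw fault list, e.g. ['InvalidArgument']."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    _FAULT_CLASSES = {}  # hypothetical registry: fault name -> exception class

    def translate_fault(fault_name, message):
        # Fall back to the generic wrapper when the fault is not registered,
        # matching the "Fault InvalidArgument not matched" DEBUG line.
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:
            return VimFaultException([fault_name], message)
        return cls(message)

Because 'InvalidArgument' resolves to the generic wrapper, the compute manager sees VimFaultException, aborts the resource claim, and re-schedules the build, as the entries immediately above show.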
[ 2460.371825] env[61663]: INFO nova.scheduler.client.report [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Deleted allocations for instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa [ 2460.395736] env[61663]: DEBUG oslo_concurrency.lockutils [None req-557a1550-6188-4bbc-a8f9-52fde8b259db tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 574.671s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2460.396916] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 379.009s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2460.397041] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Acquiring lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2460.397219] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2460.397406] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2460.399338] env[61663]: INFO nova.compute.manager [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Terminating instance [ 2460.401676] env[61663]: DEBUG nova.compute.manager [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2460.401888] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2460.402211] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8774e46e-8f5e-42d0-aea1-d4562660894d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.411486] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429f6e90-8357-4ea6-ad61-a224c2badcd5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.424347] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2460.445974] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 525749ba-7de2-4ec5-8f7b-1f4c291710fa could not be found. [ 2460.445974] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2460.445974] env[61663]: INFO nova.compute.manager [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2460.446199] env[61663]: DEBUG oslo.service.loopingcall [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2460.446412] env[61663]: DEBUG nova.compute.manager [-] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2460.446503] env[61663]: DEBUG nova.network.neutron [-] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2460.476571] env[61663]: DEBUG nova.network.neutron [-] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2460.478249] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2460.478478] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2460.479899] env[61663]: INFO nova.compute.claims [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2460.492018] env[61663]: INFO nova.compute.manager [-] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] Took 0.05 seconds to deallocate network for instance. [ 2460.581943] env[61663]: DEBUG oslo_concurrency.lockutils [None req-e9e9aa7b-c220-4711-bfe0-684908579aa9 tempest-ImagesTestJSON-394939486 tempest-ImagesTestJSON-394939486-project-member] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.185s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2460.582968] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 44.497s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2460.583173] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 525749ba-7de2-4ec5-8f7b-1f4c291710fa] During sync_power_state the instance has a pending task (deleting). Skip. 
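Each 'Acquiring lock ... acquired ... waited Ns ... "released" ... held Ns' triplet above (for "compute_resources", the instance UUID lock, and the -events lock) comes from a wrapper that times both the wait for the lock and the time spent holding it. A simplified stand-in for that pattern — real lockutils supports fair, external, and semaphore-backed locks, so this only shows where the two timings come from:

    import functools
    import logging
    import threading
    import time

    LOG = logging.getLogger(__name__)
    _locks = {}

    def synchronized(name):
        lock = _locks.setdefault(name, threading.Lock())
        def decorator(fn):
            @functools.wraps(fn)
            def inner(*args, **kwargs):
                t0 = time.monotonic()
                with lock:
                    # "acquired ... :: waited N.NNNs"
                    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                              name, fn.__qualname__, time.monotonic() - t0)
                    t1 = time.monotonic()
                    try:
                        return fn(*args, **kwargs)
                    finally:
                        # '"released" ... :: held N.NNNs'
                        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                                  name, fn.__qualname__, time.monotonic() - t1)
            return inner
        return decorator

The 574.671s hold on the instance lock above is the whole _locked_do_build_and_run_instance call measured this way, and the 379.009s wait is the terminate request queued behind it.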
[ 2460.583387] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "525749ba-7de2-4ec5-8f7b-1f4c291710fa" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2460.701201] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866c981f-a489-4a11-959c-17ac20b485bc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.708776] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5d628f-54fd-4e8e-894a-48f90d365bc4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.738007] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c51a70-2b81-4ec0-8b0d-86481e2cd418 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.745242] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899fda03-7138-463b-95ee-4b1a999312be {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.758586] env[61663]: DEBUG nova.compute.provider_tree [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2460.766979] env[61663]: DEBUG nova.scheduler.client.report [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2460.780017] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.301s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2460.780469] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2460.813018] env[61663]: DEBUG nova.compute.utils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2460.814364] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2460.814534] env[61663]: DEBUG nova.network.neutron [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2460.823633] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2460.876632] env[61663]: DEBUG nova.policy [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ecb588edff64911bf5120de68b010eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8b90f6021c544484902ae30054503895', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2460.886244] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2460.911382] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2460.911628] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2460.911788] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2460.911969] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2460.912160] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2460.912332] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2460.912539] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2460.912702] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2460.912866] env[61663]: DEBUG nova.virt.hardware [None 
req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2460.913045] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2460.913230] env[61663]: DEBUG nova.virt.hardware [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2460.914070] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c956597-8cde-4dcd-88f6-ef69beadf955 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2460.921710] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f30f5aa-273d-4e16-8f96-b7b9f493ee0a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2461.179754] env[61663]: DEBUG nova.network.neutron [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Successfully created port: e9f59d63-0288-4b65-b080-ed451b72f5b8 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2462.071479] env[61663]: DEBUG nova.network.neutron [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Successfully updated port: e9f59d63-0288-4b65-b080-ed451b72f5b8 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2462.083372] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "refresh_cache-18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2462.083372] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired lock "refresh_cache-18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2462.083372] env[61663]: DEBUG nova.network.neutron [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2462.158056] env[61663]: DEBUG nova.network.neutron [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 
tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2462.315762] env[61663]: DEBUG nova.compute.manager [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Received event network-vif-plugged-e9f59d63-0288-4b65-b080-ed451b72f5b8 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2462.315990] env[61663]: DEBUG oslo_concurrency.lockutils [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] Acquiring lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2462.316615] env[61663]: DEBUG oslo_concurrency.lockutils [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] Lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2462.316767] env[61663]: DEBUG oslo_concurrency.lockutils [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] Lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2462.316921] env[61663]: DEBUG nova.compute.manager [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] No waiting events found dispatching network-vif-plugged-e9f59d63-0288-4b65-b080-ed451b72f5b8 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2462.317110] env[61663]: WARNING nova.compute.manager [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Received unexpected event network-vif-plugged-e9f59d63-0288-4b65-b080-ed451b72f5b8 for instance with vm_state building and task_state spawning. [ 2462.317282] env[61663]: DEBUG nova.compute.manager [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Received event network-changed-e9f59d63-0288-4b65-b080-ed451b72f5b8 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2462.317435] env[61663]: DEBUG nova.compute.manager [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Refreshing instance network info cache due to event network-changed-e9f59d63-0288-4b65-b080-ed451b72f5b8. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2462.317603] env[61663]: DEBUG oslo_concurrency.lockutils [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] Acquiring lock "refresh_cache-18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2462.321607] env[61663]: DEBUG nova.network.neutron [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Updating instance_info_cache with network_info: [{"id": "e9f59d63-0288-4b65-b080-ed451b72f5b8", "address": "fa:16:3e:14:6c:76", "network": {"id": "bd3a627c-746f-4e8f-8223-f0f3b30c965b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-924597021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b90f6021c544484902ae30054503895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f59d63-02", "ovs_interfaceid": "e9f59d63-0288-4b65-b080-ed451b72f5b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2462.335026] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Releasing lock "refresh_cache-18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2462.335026] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Instance network_info: |[{"id": "e9f59d63-0288-4b65-b080-ed451b72f5b8", "address": "fa:16:3e:14:6c:76", "network": {"id": "bd3a627c-746f-4e8f-8223-f0f3b30c965b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-924597021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b90f6021c544484902ae30054503895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tape9f59d63-02", "ovs_interfaceid": "e9f59d63-0288-4b65-b080-ed451b72f5b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2462.335162] env[61663]: DEBUG oslo_concurrency.lockutils [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] Acquired lock "refresh_cache-18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2462.335312] env[61663]: DEBUG nova.network.neutron [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Refreshing network info cache for port e9f59d63-0288-4b65-b080-ed451b72f5b8 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2462.336283] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:6c:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c797172-a569-458e-aeb0-3f21e589a740', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9f59d63-0288-4b65-b080-ed451b72f5b8', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2462.343623] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Creating folder: Project (8b90f6021c544484902ae30054503895). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2462.346306] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-152558a1-5451-4126-aefc-06fef4a320f8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.357139] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Created folder: Project (8b90f6021c544484902ae30054503895) in parent group-v352575. [ 2462.357322] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Creating folder: Instances. Parent ref: group-v352686. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2462.357531] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5d2c35e-b570-49b6-80a3-16ae60efb346 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.365979] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Created folder: Instances in parent group-v352686. 
[ 2462.366209] env[61663]: DEBUG oslo.service.loopingcall [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2462.366381] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2462.366562] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9d54b188-85ea-41a6-aaec-0edeabfbdd29 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.386842] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2462.386842] env[61663]: value = "task-1690876" [ 2462.386842] env[61663]: _type = "Task" [ 2462.386842] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.398224] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690876, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.896326] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690876, 'name': CreateVM_Task, 'duration_secs': 0.285124} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2462.896507] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2462.897236] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2462.897412] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2462.897722] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2462.898143] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdcbb499-f125-4e27-96b1-ba2dcb6318f2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.902477] env[61663]: DEBUG oslo_vmware.api [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 
tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for the task: (returnval){ [ 2462.902477] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f7a72c-9051-467c-b7ae-3821c901303d" [ 2462.902477] env[61663]: _type = "Task" [ 2462.902477] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2462.910510] env[61663]: DEBUG oslo_vmware.api [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f7a72c-9051-467c-b7ae-3821c901303d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2462.944280] env[61663]: DEBUG nova.network.neutron [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Updated VIF entry in instance network info cache for port e9f59d63-0288-4b65-b080-ed451b72f5b8. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2462.944649] env[61663]: DEBUG nova.network.neutron [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Updating instance_info_cache with network_info: [{"id": "e9f59d63-0288-4b65-b080-ed451b72f5b8", "address": "fa:16:3e:14:6c:76", "network": {"id": "bd3a627c-746f-4e8f-8223-f0f3b30c965b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-924597021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b90f6021c544484902ae30054503895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9f59d63-02", "ovs_interfaceid": "e9f59d63-0288-4b65-b080-ed451b72f5b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2462.954216] env[61663]: DEBUG oslo_concurrency.lockutils [req-4532c0ff-8a5d-4da1-ba31-be4f9b1dbe8f req-0c14febf-d8cc-4e8b-8a2f-698a5b90ba8e service nova] Releasing lock "refresh_cache-18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2463.236545] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquiring lock "0adee33d-8d0c-4bcf-8df4-11465be00485" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2463.413466] env[61663]: DEBUG 
oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2463.413734] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2463.413953] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2479.837844] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2490.947851] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "911c036c-c7d8-4ff7-b874-335361fb5281" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2490.948155] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "911c036c-c7d8-4ff7-b874-335361fb5281" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2499.692596] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2499.692870] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2506.687744] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2507.692056] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2507.692352] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2507.692392] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2507.715474] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.715663] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.716052] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.716240] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.716397] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.716558] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.716710] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.716852] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.717016] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.717178] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2507.717319] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2508.531656] env[61663]: WARNING oslo_vmware.rw_handles [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2508.531656] env[61663]: ERROR oslo_vmware.rw_handles [ 2508.532188] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/f6454763-87f4-493d-b4eb-eea41ffc9413/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2508.534319] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2508.534593] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Copying Virtual Disk [datastore1] vmware_temp/f6454763-87f4-493d-b4eb-eea41ffc9413/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/f6454763-87f4-493d-b4eb-eea41ffc9413/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2508.534888] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c23a0f1-8130-4934-b651-478ea181201f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.543650] env[61663]: DEBUG oslo_vmware.api [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for the task: (returnval){ [ 2508.543650] env[61663]: value = "task-1690878" [ 2508.543650] env[61663]: _type = "Task" [ 2508.543650] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2508.553036] env[61663]: DEBUG oslo_vmware.api [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Task: {'id': task-1690878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2508.691850] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2509.054213] env[61663]: DEBUG oslo_vmware.exceptions [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2509.054567] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2509.054986] env[61663]: ERROR nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2509.054986] env[61663]: Faults: ['InvalidArgument'] [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Traceback (most recent call last): [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] yield resources [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] self.driver.spawn(context, instance, image_meta, [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, 
in spawn [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] self._fetch_image_if_missing(context, vi) [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] image_cache(vi, tmp_image_ds_loc) [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] vm_util.copy_virtual_disk( [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] session._wait_for_task(vmdk_copy_task) [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] return self.wait_for_task(task_ref) [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] return evt.wait() [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] result = hub.switch() [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] return self.greenlet.switch() [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] self.f(*self.args, **self.kw) [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] raise exceptions.translate_fault(task_info.error) [ 2509.054986] env[61663]: ERROR nova.compute.manager 
[instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Faults: ['InvalidArgument'] [ 2509.054986] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] [ 2509.055914] env[61663]: INFO nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Terminating instance [ 2509.056835] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2509.057100] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2509.057340] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-47461492-c22b-42bb-bdfb-f620b5a68dfd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.059432] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2509.059630] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2509.060330] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1c019a-a5d7-4ffc-a98a-a5a7a5079673 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.066996] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2509.067213] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f397783-4096-4130-9099-ecb09b194e93 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.069260] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2509.069435] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2509.070340] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bd549da-e7d9-449b-adb2-0b6196e980d7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.074819] env[61663]: DEBUG oslo_vmware.api [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for the task: (returnval){ [ 2509.074819] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cfa155-8a03-c945-6b26-44b44097fe16" [ 2509.074819] env[61663]: _type = "Task" [ 2509.074819] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2509.081802] env[61663]: DEBUG oslo_vmware.api [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cfa155-8a03-c945-6b26-44b44097fe16, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2509.135819] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2509.136050] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2509.136244] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Deleting the datastore file [datastore1] 740f7887-4a5c-4889-9635-e9d9c6607ee7 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2509.136503] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7c18a9b-db1d-49b9-94df-9c777a4922ab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.142575] env[61663]: DEBUG oslo_vmware.api [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for the task: (returnval){ [ 2509.142575] env[61663]: value = "task-1690880" [ 2509.142575] env[61663]: _type = "Task" [ 2509.142575] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2509.150337] env[61663]: DEBUG oslo_vmware.api [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Task: {'id': task-1690880, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2509.585250] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2509.585508] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Creating directory with path [datastore1] vmware_temp/d1f9919f-1902-4f8c-8240-2b293dcddab5/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2509.585735] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a79bbac-79aa-4705-886d-91a5e6972b98 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.597132] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Created directory with path [datastore1] vmware_temp/d1f9919f-1902-4f8c-8240-2b293dcddab5/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2509.597326] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Fetch image to [datastore1] vmware_temp/d1f9919f-1902-4f8c-8240-2b293dcddab5/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2509.597496] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/d1f9919f-1902-4f8c-8240-2b293dcddab5/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2509.598213] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee113e4-f2d2-4da7-9291-594e0d5dfd7d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.604631] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1927c20-0348-475b-bbab-a7c0bd41a020 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.613461] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e86745f-c9fc-4137-bb12-bb3867b20bb7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.643641] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5eac7994-e262-4b7e-a5e6-788af40a83b2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.655264] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f3619f13-79cd-42e1-ba24-cf3cf34c242a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.656958] env[61663]: DEBUG oslo_vmware.api [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Task: {'id': task-1690880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081711} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2509.657204] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2509.657415] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2509.657598] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2509.657769] env[61663]: INFO nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Took 0.60 seconds to destroy the instance on the hypervisor. 
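
The wait_for_task/_poll_task exchange above (task-1690880 polled at 0%, then reported completed successfully with a duration_secs value) follows a simple pattern: poll the vCenter task state on a fixed interval, log progress, and either return the result or raise a translated fault. A minimal sketch of that loop follows; get_task_info is an illustrative stand-in for the PropertyCollector query oslo.vmware actually issues, and TaskFaultError stands in for oslo_vmware.exceptions.VimFaultException — neither is the library's real API.

import time


class TaskFaultError(Exception):
    """Illustrative stand-in for oslo_vmware.exceptions.VimFaultException."""


def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    # Poll until the task reaches a terminal state, mirroring the
    # "progress is 0%" / "completed successfully" lines above.
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)  # e.g. {'state': 'running', 'progress': 0}
        if info['state'] == 'success':
            info['duration_secs'] = round(time.monotonic() - start, 6)
            return info
        if info['state'] == 'error':
            # Corresponds to: raise exceptions.translate_fault(task_info.error)
            raise TaskFaultError(info.get('error', 'unknown fault'))
        print("Task: %s progress is %s%%." % (task_id, info.get('progress', 0)))
        time.sleep(poll_interval)
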
[ 2509.659878] env[61663]: DEBUG nova.compute.claims [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2509.660052] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2509.660302] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2509.678274] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2509.728543] env[61663]: DEBUG oslo_vmware.rw_handles [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d1f9919f-1902-4f8c-8240-2b293dcddab5/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2509.789412] env[61663]: DEBUG oslo_vmware.rw_handles [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2509.789652] env[61663]: DEBUG oslo_vmware.rw_handles [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d1f9919f-1902-4f8c-8240-2b293dcddab5/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2509.918733] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d959370-1a48-4d55-9269-4eff465872b8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.926874] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e1ce0d-2502-4f98-80a6-1a593e20b8c0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.956207] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24af6b7-247c-4a49-b242-cec3052810a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.963391] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75067fa1-55c0-453b-b533-042b4662f721 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2509.976274] env[61663]: DEBUG nova.compute.provider_tree [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2509.985652] env[61663]: DEBUG nova.scheduler.client.report [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2510.000716] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.340s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.001303] env[61663]: ERROR nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2510.001303] env[61663]: Faults: ['InvalidArgument'] [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Traceback (most recent call last): [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 
740f7887-4a5c-4889-9635-e9d9c6607ee7] self.driver.spawn(context, instance, image_meta, [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] self._fetch_image_if_missing(context, vi) [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] image_cache(vi, tmp_image_ds_loc) [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] vm_util.copy_virtual_disk( [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] session._wait_for_task(vmdk_copy_task) [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] return self.wait_for_task(task_ref) [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] return evt.wait() [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] result = hub.switch() [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] return self.greenlet.switch() [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] self.f(*self.args, **self.kw) [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] raise exceptions.translate_fault(task_info.error) [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Faults: ['InvalidArgument'] [ 2510.001303] env[61663]: ERROR nova.compute.manager [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] [ 2510.002250] env[61663]: DEBUG nova.compute.utils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2510.003424] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Build of instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 was re-scheduled: A specified parameter was not correct: fileType [ 2510.003424] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2510.003784] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2510.003959] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2510.004150] env[61663]: DEBUG nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2510.004331] env[61663]: DEBUG nova.network.neutron [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2510.366898] env[61663]: DEBUG nova.network.neutron [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2510.378227] env[61663]: INFO nova.compute.manager [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Took 0.37 seconds to deallocate network for instance. [ 2510.473133] env[61663]: INFO nova.scheduler.client.report [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Deleted allocations for instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 [ 2510.495983] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52d7055e-773b-442c-b7b6-1673e06f6a49 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 610.016s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.497326] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 414.254s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2510.497452] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "740f7887-4a5c-4889-9635-e9d9c6607ee7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2510.497686] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2510.497869] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.500022] env[61663]: INFO nova.compute.manager [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Terminating instance [ 2510.501969] env[61663]: DEBUG nova.compute.manager [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2510.502235] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2510.502798] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0711ece7-d868-4ce1-8f43-20a39a2318c3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.507569] env[61663]: DEBUG nova.compute.manager [None req-daad2c63-2259-418d-8d1e-139537dfebac tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 5002feba-1ffc-4957-ad11-712fce784ef4] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2510.513770] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d823822-c354-4d60-95d0-26c173e8b9ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.532404] env[61663]: DEBUG nova.compute.manager [None req-daad2c63-2259-418d-8d1e-139537dfebac tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 5002feba-1ffc-4957-ad11-712fce784ef4] Instance disappeared before build. {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 2510.541762] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 740f7887-4a5c-4889-9635-e9d9c6607ee7 could not be found. 
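
The WARNING just above shows the destroy path tolerating a VM that is already gone: nova.exception.InstanceNotFound is caught, logged, and teardown continues so network deallocation and allocation cleanup still run. A sketch of that idempotent-destroy pattern, with find_vm_ref and unregister_and_delete as hypothetical helpers rather than Nova's real names:

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Illustrative stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(find_vm_ref, unregister_and_delete, instance_uuid):
    # Absence on the backend is treated as success so the caller can still
    # deallocate networks and remove placement allocations afterwards.
    try:
        vm_ref = find_vm_ref(instance_uuid)
        unregister_and_delete(vm_ref)
    except InstanceNotFound:
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.info("Instance destroyed")
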
[ 2510.541960] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2510.542160] env[61663]: INFO nova.compute.manager [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2510.542399] env[61663]: DEBUG oslo.service.loopingcall [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2510.542792] env[61663]: DEBUG nova.compute.manager [-] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2510.542894] env[61663]: DEBUG nova.network.neutron [-] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2510.557948] env[61663]: DEBUG oslo_concurrency.lockutils [None req-daad2c63-2259-418d-8d1e-139537dfebac tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "5002feba-1ffc-4957-ad11-712fce784ef4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.802s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.566053] env[61663]: DEBUG nova.network.neutron [-] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2510.567961] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2510.573944] env[61663]: INFO nova.compute.manager [-] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] Took 0.03 seconds to deallocate network for instance. 
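
The "Waiting for function ..._deallocate_network_with_retries to return" line reflects oslo.service's looping-call machinery: the wrapped function is re-invoked on a fixed interval until it raises LoopingCallDone (success) or lets an exception escape (giving up). A hedged sketch under assumed values — the retry budget and interval below are illustrative, not Nova's configuration, and deallocate() is a placeholder for the real Neutron call:

from oslo_service import loopingcall

MAX_ATTEMPTS = 3  # assumed retry budget


def deallocate():
    """Placeholder for the real deallocate_for_instance() call."""


def make_retrier():
    attempts = {'n': 0}

    def _deallocate_network_with_retries():
        attempts['n'] += 1
        try:
            deallocate()
        except Exception:
            if attempts['n'] >= MAX_ATTEMPTS:
                raise                        # stops the loop; wait() re-raises
            return                           # swallow and retry on next tick
        raise loopingcall.LoopingCallDone()  # success: stop looping

    return _deallocate_network_with_retries


timer = loopingcall.FixedIntervalLoopingCall(make_retrier())
timer.start(interval=1).wait()  # blocks until done, as the log line describes
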
[ 2510.617432] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2510.617964] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2510.619104] env[61663]: INFO nova.compute.claims [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2510.655874] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6427d398-dbd8-4812-9607-7f99b340b1e8 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.159s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.656836] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 94.571s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2510.657038] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 740f7887-4a5c-4889-9635-e9d9c6607ee7] During sync_power_state the instance has a pending task (deleting). Skip. 
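
The lock bookkeeping that runs through this whole log ("acquired by ... waited", "released ... held") is emitted by oslo.concurrency's wrappers: code takes a named lock either through the synchronized decorator or the lock context manager, and the wrapper DEBUG-logs wait and hold times. A minimal usage sketch with illustrative lock names:

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_available_resource():
    """Runs with the named lock held; wait/held times are DEBUG-logged."""


def terminate(instance_uuid):
    # Context-manager form, keyed on the instance UUID as in the log.
    with lockutils.lock(instance_uuid):
        pass  # critical section
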
[ 2510.657189] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "740f7887-4a5c-4889-9635-e9d9c6607ee7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.691601] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2510.691983] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2510.692201] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2510.704915] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2510.816475] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e4f6f7-ad05-4205-b71b-c103d9acfb66 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.824385] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a711ef0-3d86-42ea-8c73-53a133f9a6c9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.856652] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf619d5-d75b-4d52-8f89-a12ecd5c4b19 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.863857] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49eab50-2509-4037-b0de-88a4bd765b71 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.876813] env[61663]: DEBUG nova.compute.provider_tree [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2510.885391] env[61663]: DEBUG nova.scheduler.client.report [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2510.900011] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.282s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.900508] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2510.903023] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.198s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2510.903225] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2510.903709] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2510.904683] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507ebb6e-afda-4a7e-ae2a-4658e4f43fc1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.913250] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3463c4-1189-4bd1-83c5-90dece8467b5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.927090] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23319b89-cdd5-4cb3-b2c1-615755d14b93 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.933489] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86c0ab3-68ab-46c1-86a1-a6cda51d9236 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2510.937904] env[61663]: DEBUG nova.compute.utils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:238}} [ 2510.939442] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2510.939611] env[61663]: DEBUG nova.network.neutron [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2510.966686] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181317MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2510.966877] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2510.967091] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2510.969146] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2511.030596] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.030757] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.030917] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.031081] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.031211] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.031330] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.031488] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.031572] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.031750] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.031831] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2511.036456] env[61663]: DEBUG nova.policy [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e32a3ed536f140a2b10532295b389127', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab3cef71bf2e45d6b45c5a77b2b13670', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2511.041667] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2511.045018] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2511.055354] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2511.065084] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 911c036c-c7d8-4ff7-b874-335361fb5281 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2511.065313] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2511.065462] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2511.069605] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2511.069834] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2511.069994] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2511.070190] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2511.070338] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2511.070487] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2511.070688] env[61663]: DEBUG 
nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2511.070849] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2511.071049] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2511.071226] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2511.071400] env[61663]: DEBUG nova.virt.hardware [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2511.072743] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded37196-22b7-4179-9735-f663633d5fbf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.082419] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5ff198-a94e-4616-9ba1-c596c8aea60d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.239653] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a294b8d-e905-4940-85ff-5cb36302fa23 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.246785] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437b8c47-9a80-4c02-8c7c-a2d865338bdd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.277264] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdabff02-ed38-454b-b290-8c12459081a8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.284633] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7348e007-d902-42dd-8736-5cda2613e1e4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2511.992603] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed 
in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2511.994328] env[61663]: DEBUG nova.network.neutron [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Successfully created port: f028b3e4-8b87-468e-8082-9378f025dd3e {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2512.000920] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2512.014564] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2512.014749] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.048s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2512.906381] env[61663]: DEBUG nova.compute.manager [req-7b80563a-1664-4d7c-b4b8-bc7f7c83bf48 req-75942726-5cdd-42de-b407-8da26ec1e815 service nova] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Received event network-vif-plugged-f028b3e4-8b87-468e-8082-9378f025dd3e {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2512.906596] env[61663]: DEBUG oslo_concurrency.lockutils [req-7b80563a-1664-4d7c-b4b8-bc7f7c83bf48 req-75942726-5cdd-42de-b407-8da26ec1e815 service nova] Acquiring lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2512.906797] env[61663]: DEBUG oslo_concurrency.lockutils [req-7b80563a-1664-4d7c-b4b8-bc7f7c83bf48 req-75942726-5cdd-42de-b407-8da26ec1e815 service nova] Lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2512.906958] env[61663]: DEBUG oslo_concurrency.lockutils [req-7b80563a-1664-4d7c-b4b8-bc7f7c83bf48 req-75942726-5cdd-42de-b407-8da26ec1e815 service nova] Lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2512.907599] env[61663]: DEBUG nova.compute.manager [req-7b80563a-1664-4d7c-b4b8-bc7f7c83bf48 
req-75942726-5cdd-42de-b407-8da26ec1e815 service nova] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] No waiting events found dispatching network-vif-plugged-f028b3e4-8b87-468e-8082-9378f025dd3e {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2512.907855] env[61663]: WARNING nova.compute.manager [req-7b80563a-1664-4d7c-b4b8-bc7f7c83bf48 req-75942726-5cdd-42de-b407-8da26ec1e815 service nova] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Received unexpected event network-vif-plugged-f028b3e4-8b87-468e-8082-9378f025dd3e for instance with vm_state building and task_state spawning. [ 2512.985455] env[61663]: DEBUG nova.network.neutron [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Successfully updated port: f028b3e4-8b87-468e-8082-9378f025dd3e {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2512.999142] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "refresh_cache-5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2512.999418] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquired lock "refresh_cache-5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2512.999418] env[61663]: DEBUG nova.network.neutron [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2513.067228] env[61663]: DEBUG nova.network.neutron [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2513.397189] env[61663]: DEBUG nova.network.neutron [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Updating instance_info_cache with network_info: [{"id": "f028b3e4-8b87-468e-8082-9378f025dd3e", "address": "fa:16:3e:e4:74:8f", "network": {"id": "0bf1355e-3265-4cdb-9b3e-f1e1876f43b6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1573123323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab3cef71bf2e45d6b45c5a77b2b13670", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf028b3e4-8b", "ovs_interfaceid": "f028b3e4-8b87-468e-8082-9378f025dd3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2513.410085] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Releasing lock "refresh_cache-5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2513.410559] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Instance network_info: |[{"id": "f028b3e4-8b87-468e-8082-9378f025dd3e", "address": "fa:16:3e:e4:74:8f", "network": {"id": "0bf1355e-3265-4cdb-9b3e-f1e1876f43b6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1573123323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab3cef71bf2e45d6b45c5a77b2b13670", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf028b3e4-8b", "ovs_interfaceid": "f028b3e4-8b87-468e-8082-9378f025dd3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2513.410814] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:74:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f028b3e4-8b87-468e-8082-9378f025dd3e', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2513.418161] env[61663]: DEBUG oslo.service.loopingcall [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2513.418543] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2513.418765] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3558e4e1-6fcc-4af7-b0bd-0d8d707d375d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.438779] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2513.438779] env[61663]: value = "task-1690881" [ 2513.438779] env[61663]: _type = "Task" [ 2513.438779] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2513.446333] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690881, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2513.948727] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690881, 'name': CreateVM_Task, 'duration_secs': 0.289527} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2513.948910] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2513.955037] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2513.955214] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2513.955536] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2513.955770] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79f2bcc6-1551-413d-a1a3-599a242d1bb6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.960317] env[61663]: DEBUG oslo_vmware.api [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for the task: (returnval){ [ 2513.960317] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5216e252-d610-358f-97ba-0725b8be6cb2" [ 2513.960317] env[61663]: _type = "Task" [ 2513.960317] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2513.967972] env[61663]: DEBUG oslo_vmware.api [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5216e252-d610-358f-97ba-0725b8be6cb2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2514.471736] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2514.472017] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2514.472223] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2514.934615] env[61663]: DEBUG nova.compute.manager [req-2b50f4cd-720e-4c53-bec3-3a94e41688ef req-4aac2d24-52ea-43bc-ad02-d9b203fbc993 service nova] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Received event network-changed-f028b3e4-8b87-468e-8082-9378f025dd3e {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2514.934806] env[61663]: DEBUG nova.compute.manager [req-2b50f4cd-720e-4c53-bec3-3a94e41688ef req-4aac2d24-52ea-43bc-ad02-d9b203fbc993 service nova] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Refreshing instance network info cache due to event network-changed-f028b3e4-8b87-468e-8082-9378f025dd3e. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2514.935041] env[61663]: DEBUG oslo_concurrency.lockutils [req-2b50f4cd-720e-4c53-bec3-3a94e41688ef req-4aac2d24-52ea-43bc-ad02-d9b203fbc993 service nova] Acquiring lock "refresh_cache-5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2514.935208] env[61663]: DEBUG oslo_concurrency.lockutils [req-2b50f4cd-720e-4c53-bec3-3a94e41688ef req-4aac2d24-52ea-43bc-ad02-d9b203fbc993 service nova] Acquired lock "refresh_cache-5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2514.935388] env[61663]: DEBUG nova.network.neutron [req-2b50f4cd-720e-4c53-bec3-3a94e41688ef req-4aac2d24-52ea-43bc-ad02-d9b203fbc993 service nova] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Refreshing network info cache for port f028b3e4-8b87-468e-8082-9378f025dd3e {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2515.237301] env[61663]: DEBUG nova.network.neutron [req-2b50f4cd-720e-4c53-bec3-3a94e41688ef req-4aac2d24-52ea-43bc-ad02-d9b203fbc993 service nova] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Updated VIF entry in instance network info cache for port f028b3e4-8b87-468e-8082-9378f025dd3e. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2515.237645] env[61663]: DEBUG nova.network.neutron [req-2b50f4cd-720e-4c53-bec3-3a94e41688ef req-4aac2d24-52ea-43bc-ad02-d9b203fbc993 service nova] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Updating instance_info_cache with network_info: [{"id": "f028b3e4-8b87-468e-8082-9378f025dd3e", "address": "fa:16:3e:e4:74:8f", "network": {"id": "0bf1355e-3265-4cdb-9b3e-f1e1876f43b6", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1573123323-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab3cef71bf2e45d6b45c5a77b2b13670", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf028b3e4-8b", "ovs_interfaceid": "f028b3e4-8b87-468e-8082-9378f025dd3e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2515.246911] env[61663]: DEBUG oslo_concurrency.lockutils [req-2b50f4cd-720e-4c53-bec3-3a94e41688ef req-4aac2d24-52ea-43bc-ad02-d9b203fbc993 service nova] Releasing lock "refresh_cache-5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2517.015123] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2517.015482] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2519.687706] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2521.728321] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2528.117250] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "bcc3a109-50ca-4a22-90f3-609231a3e95f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2528.117250] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "bcc3a109-50ca-4a22-90f3-609231a3e95f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2559.738054] env[61663]: WARNING oslo_vmware.rw_handles [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2559.738054] env[61663]: ERROR oslo_vmware.rw_handles [ 2559.738738] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 
54a78c20-cbf6-453b-88e4-2fb4da0a6200] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/d1f9919f-1902-4f8c-8240-2b293dcddab5/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2559.740891] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2559.741223] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Copying Virtual Disk [datastore1] vmware_temp/d1f9919f-1902-4f8c-8240-2b293dcddab5/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/d1f9919f-1902-4f8c-8240-2b293dcddab5/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2559.741493] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e69c6f40-ebea-4dba-80d1-9a56cdfad29a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2559.750509] env[61663]: DEBUG oslo_vmware.api [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for the task: (returnval){ [ 2559.750509] env[61663]: value = "task-1690882" [ 2559.750509] env[61663]: _type = "Task" [ 2559.750509] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2559.757975] env[61663]: DEBUG oslo_vmware.api [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': task-1690882, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2560.261248] env[61663]: DEBUG oslo_vmware.exceptions [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2560.261531] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2560.262117] env[61663]: ERROR nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2560.262117] env[61663]: Faults: ['InvalidArgument'] [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Traceback (most recent call last): [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] yield resources [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] self.driver.spawn(context, instance, image_meta, [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] self._fetch_image_if_missing(context, vi) [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] image_cache(vi, tmp_image_ds_loc) [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] vm_util.copy_virtual_disk( [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] session._wait_for_task(vmdk_copy_task) [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] return self.wait_for_task(task_ref) [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] return evt.wait() [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] result = hub.switch() [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] return self.greenlet.switch() [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] self.f(*self.args, **self.kw) [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] raise exceptions.translate_fault(task_info.error) [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Faults: ['InvalidArgument'] [ 2560.262117] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] [ 2560.263264] env[61663]: INFO nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Terminating instance [ 2560.264075] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2560.264292] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2560.264534] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f88217ce-f7d0-4c46-abc6-732fc9219485 {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.266668] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2560.266867] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2560.267584] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ea7d4a-d275-427d-8bbf-c063492a9f24 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.274594] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2560.274810] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3aea696-e166-4411-9ed0-c5c7cca38417 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.276894] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2560.277077] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2560.277986] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-434ab06c-b6b4-4c92-b3a4-5a89b807c154 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.282476] env[61663]: DEBUG oslo_vmware.api [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Waiting for the task: (returnval){ [ 2560.282476] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5242bd94-1742-5449-7efc-e3d330dac3ba" [ 2560.282476] env[61663]: _type = "Task" [ 2560.282476] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2560.290507] env[61663]: DEBUG oslo_vmware.api [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5242bd94-1742-5449-7efc-e3d330dac3ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2560.344622] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2560.344865] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2560.345058] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Deleting the datastore file [datastore1] 54a78c20-cbf6-453b-88e4-2fb4da0a6200 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2560.345318] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9eddd6fd-75f4-4bc4-9609-47192917dd64 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.352666] env[61663]: DEBUG oslo_vmware.api [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for the task: (returnval){ [ 2560.352666] env[61663]: value = "task-1690884" [ 2560.352666] env[61663]: _type = "Task" [ 2560.352666] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2560.361395] env[61663]: DEBUG oslo_vmware.api [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': task-1690884, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2560.692480] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2560.692753] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2560.792460] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2560.792771] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Creating directory with path [datastore1] vmware_temp/2ad610ef-0826-48bb-90e8-a29d557257c1/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2560.792908] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fbbf98c-04e7-4ead-9b9f-f3f69f88cc99 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.804389] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Created directory with path [datastore1] vmware_temp/2ad610ef-0826-48bb-90e8-a29d557257c1/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2560.804583] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Fetch image to [datastore1] vmware_temp/2ad610ef-0826-48bb-90e8-a29d557257c1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2560.804753] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/2ad610ef-0826-48bb-90e8-a29d557257c1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2560.805461] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d75089d-de0d-44a9-ab76-83eb588b7107 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.811823] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-69f5980b-9d83-4dd8-9915-303cafe15444 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.820447] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bf1d04-0ff0-48b6-99a4-7916b98b3d1a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.849827] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929516f0-9535-4cbe-88a6-7c83e1db213d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.857995] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1e951fc9-e1f5-477d-8003-cce1eeabd47c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2560.862198] env[61663]: DEBUG oslo_vmware.api [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': task-1690884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081338} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2560.862739] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2560.862926] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2560.863115] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2560.863293] env[61663]: INFO nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2560.865391] env[61663]: DEBUG nova.compute.claims [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2560.865585] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2560.865806] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2560.882570] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2560.937103] env[61663]: DEBUG oslo_vmware.rw_handles [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2ad610ef-0826-48bb-90e8-a29d557257c1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2561.000023] env[61663]: DEBUG oslo_vmware.rw_handles [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2561.000222] env[61663]: DEBUG oslo_vmware.rw_handles [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2ad610ef-0826-48bb-90e8-a29d557257c1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2561.113837] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35fa20c-4d60-4db6-a2ff-868ee5496620 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.121354] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d42d1be-d2a5-4e50-9959-1c9e931c6848 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.151107] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4aeb3e4-b71a-46c4-8cb2-9fecbcf7a70b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.157846] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50987a57-c3c3-4bac-a8df-4984d14e3573 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.174841] env[61663]: DEBUG nova.compute.provider_tree [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2561.201662] env[61663]: DEBUG nova.scheduler.client.report [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2561.223425] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.357s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2561.223976] env[61663]: ERROR nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2561.223976] env[61663]: Faults: ['InvalidArgument'] [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Traceback (most recent call last): [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] self.driver.spawn(context, instance, image_meta, [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] self._fetch_image_if_missing(context, vi) [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] image_cache(vi, tmp_image_ds_loc) [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] vm_util.copy_virtual_disk( [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] session._wait_for_task(vmdk_copy_task) [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] return self.wait_for_task(task_ref) [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] return evt.wait() [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] result = hub.switch() [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] return self.greenlet.switch() [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] self.f(*self.args, **self.kw) [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 
54a78c20-cbf6-453b-88e4-2fb4da0a6200] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] raise exceptions.translate_fault(task_info.error) [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Faults: ['InvalidArgument'] [ 2561.223976] env[61663]: ERROR nova.compute.manager [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] [ 2561.225027] env[61663]: DEBUG nova.compute.utils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2561.226155] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Build of instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 was re-scheduled: A specified parameter was not correct: fileType [ 2561.226155] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2561.226610] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2561.226783] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2561.226957] env[61663]: DEBUG nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2561.227143] env[61663]: DEBUG nova.network.neutron [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2561.491567] env[61663]: DEBUG nova.network.neutron [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2561.501707] env[61663]: INFO nova.compute.manager [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Took 0.27 seconds to deallocate network for instance. [ 2561.600194] env[61663]: INFO nova.scheduler.client.report [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Deleted allocations for instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 [ 2561.621010] env[61663]: DEBUG oslo_concurrency.lockutils [None req-1858f411-afb2-46be-ae2f-78d91b13162b tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 628.608s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2561.622202] env[61663]: DEBUG oslo_concurrency.lockutils [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.668s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2561.622449] env[61663]: DEBUG oslo_concurrency.lockutils [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2561.622665] env[61663]: DEBUG oslo_concurrency.lockutils [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2561.622834] env[61663]: DEBUG oslo_concurrency.lockutils [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2561.625533] env[61663]: INFO nova.compute.manager [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Terminating instance [ 2561.627505] env[61663]: DEBUG nova.compute.manager [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2561.627699] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2561.628590] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64436544-29da-4337-8b76-c5aa3fcb47a3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.638617] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b13c46b-5d78-4eaf-bf04-4f6ea70eb307 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.649955] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2561.671584] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 54a78c20-cbf6-453b-88e4-2fb4da0a6200 could not be found. 
[ 2561.671821] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2561.672019] env[61663]: INFO nova.compute.manager [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2561.672293] env[61663]: DEBUG oslo.service.loopingcall [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2561.672553] env[61663]: DEBUG nova.compute.manager [-] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2561.672650] env[61663]: DEBUG nova.network.neutron [-] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2561.707166] env[61663]: DEBUG nova.network.neutron [-] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2561.709421] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2561.709656] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2561.711391] env[61663]: INFO nova.compute.claims [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2561.716348] env[61663]: INFO nova.compute.manager [-] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] Took 0.04 seconds to deallocate network for instance. 
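Annotation (not part of the captured log): the inventory dicts logged around this point (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) are what placement consults to answer claims like the one just granted on node domain-c8. Effective capacity per resource class is (total - reserved) * allocation_ratio; a quick check of the logged numbers:

# Inventory as logged for provider b47d006d-a9bd-461e-a5d9-39811f005278.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0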
[ 2561.818918] env[61663]: DEBUG oslo_concurrency.lockutils [None req-55f684e3-ea8b-48c3-bf81-a440bafa2994 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2561.820092] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 145.734s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2561.820304] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 54a78c20-cbf6-453b-88e4-2fb4da0a6200] During sync_power_state the instance has a pending task (deleting). Skip. [ 2561.820491] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "54a78c20-cbf6-453b-88e4-2fb4da0a6200" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2561.933063] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3180cf8e-61ae-4e73-9cd9-df7806fd40dc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.940734] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679b3447-d0cb-4b20-aa5d-4eb23473efce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.971542] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7583e1e-8e35-4d99-87f6-8f8d707ba06a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.979519] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099fbfb9-1bd3-44b1-97f3-01f68b29031c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2561.993545] env[61663]: DEBUG nova.compute.provider_tree [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2562.003057] env[61663]: DEBUG nova.scheduler.client.report [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2562.015493] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2562.015938] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2562.047665] env[61663]: DEBUG nova.compute.utils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2562.049007] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2562.049185] env[61663]: DEBUG nova.network.neutron [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2562.057367] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2562.112530] env[61663]: DEBUG nova.policy [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff4e0b0a90ee4eb1b471509c6a1dd60b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67f23651373c47be8ea682898e598f3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2562.119023] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2562.144253] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2562.144508] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2562.144666] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2562.144849] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2562.144997] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2562.145163] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2562.145367] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2562.145526] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2562.145689] 
env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2562.145848] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2562.146028] env[61663]: DEBUG nova.virt.hardware [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2562.146877] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8e9df9-9136-4d0d-8ed2-580465c2748e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2562.156275] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee332680-f02b-4e71-84ee-92b38dcbe1f4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2562.436522] env[61663]: DEBUG nova.network.neutron [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Successfully created port: 64bdd32f-9f55-4427-b400-918734033fc5 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2563.315114] env[61663]: DEBUG nova.network.neutron [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Successfully updated port: 64bdd32f-9f55-4427-b400-918734033fc5 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2563.328619] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "refresh_cache-8fc36ed9-9315-4bdb-b4f3-248106a3c681" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2563.328977] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquired lock "refresh_cache-8fc36ed9-9315-4bdb-b4f3-248106a3c681" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2563.329624] env[61663]: DEBUG nova.network.neutron [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2563.366262] env[61663]: DEBUG 
nova.network.neutron [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2563.527842] env[61663]: DEBUG nova.network.neutron [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Updating instance_info_cache with network_info: [{"id": "64bdd32f-9f55-4427-b400-918734033fc5", "address": "fa:16:3e:22:cb:86", "network": {"id": "3ea6c793-2cad-4ae1-981e-e05421195e1f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1122825267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67f23651373c47be8ea682898e598f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64bdd32f-9f", "ovs_interfaceid": "64bdd32f-9f55-4427-b400-918734033fc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2563.541344] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Releasing lock "refresh_cache-8fc36ed9-9315-4bdb-b4f3-248106a3c681" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2563.541801] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Instance network_info: |[{"id": "64bdd32f-9f55-4427-b400-918734033fc5", "address": "fa:16:3e:22:cb:86", "network": {"id": "3ea6c793-2cad-4ae1-981e-e05421195e1f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1122825267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67f23651373c47be8ea682898e598f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64bdd32f-9f", 
"ovs_interfaceid": "64bdd32f-9f55-4427-b400-918734033fc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2563.542731] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:cb:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64bdd32f-9f55-4427-b400-918734033fc5', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2563.550906] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Creating folder: Project (67f23651373c47be8ea682898e598f3b). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2563.551415] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d4dac40-cb93-41f3-bdb5-e4461693acea {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2563.556176] env[61663]: DEBUG nova.compute.manager [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Received event network-vif-plugged-64bdd32f-9f55-4427-b400-918734033fc5 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2563.556324] env[61663]: DEBUG oslo_concurrency.lockutils [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] Acquiring lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2563.556532] env[61663]: DEBUG oslo_concurrency.lockutils [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] Lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2563.556698] env[61663]: DEBUG oslo_concurrency.lockutils [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] Lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2563.556864] env[61663]: DEBUG nova.compute.manager [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] No waiting events found dispatching network-vif-plugged-64bdd32f-9f55-4427-b400-918734033fc5 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} 
[ 2563.557040] env[61663]: WARNING nova.compute.manager [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Received unexpected event network-vif-plugged-64bdd32f-9f55-4427-b400-918734033fc5 for instance with vm_state building and task_state spawning. [ 2563.557209] env[61663]: DEBUG nova.compute.manager [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Received event network-changed-64bdd32f-9f55-4427-b400-918734033fc5 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2563.557364] env[61663]: DEBUG nova.compute.manager [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Refreshing instance network info cache due to event network-changed-64bdd32f-9f55-4427-b400-918734033fc5. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2563.557550] env[61663]: DEBUG oslo_concurrency.lockutils [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] Acquiring lock "refresh_cache-8fc36ed9-9315-4bdb-b4f3-248106a3c681" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2563.557687] env[61663]: DEBUG oslo_concurrency.lockutils [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] Acquired lock "refresh_cache-8fc36ed9-9315-4bdb-b4f3-248106a3c681" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2563.557842] env[61663]: DEBUG nova.network.neutron [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Refreshing network info cache for port 64bdd32f-9f55-4427-b400-918734033fc5 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2563.563715] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Created folder: Project (67f23651373c47be8ea682898e598f3b) in parent group-v352575. [ 2563.563821] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Creating folder: Instances. Parent ref: group-v352690. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2563.564010] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f9575b9-9c90-4668-9446-61ce60cbbbb6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2563.576042] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Created folder: Instances in parent group-v352690. [ 2563.576042] env[61663]: DEBUG oslo.service.loopingcall [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2563.576235] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2563.576431] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d79be10-5b9b-4829-ad5f-32bf207a539d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2563.594731] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2563.594731] env[61663]: value = "task-1690887" [ 2563.594731] env[61663]: _type = "Task" [ 2563.594731] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2563.603814] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690887, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2563.809043] env[61663]: DEBUG nova.network.neutron [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Updated VIF entry in instance network info cache for port 64bdd32f-9f55-4427-b400-918734033fc5. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2563.809642] env[61663]: DEBUG nova.network.neutron [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Updating instance_info_cache with network_info: [{"id": "64bdd32f-9f55-4427-b400-918734033fc5", "address": "fa:16:3e:22:cb:86", "network": {"id": "3ea6c793-2cad-4ae1-981e-e05421195e1f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1122825267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67f23651373c47be8ea682898e598f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64bdd32f-9f", "ovs_interfaceid": "64bdd32f-9f55-4427-b400-918734033fc5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2563.818949] env[61663]: DEBUG oslo_concurrency.lockutils [req-c6efaabb-f720-47e4-802a-9125015e98b8 req-36439980-3653-4290-bd52-da4d246dd761 service nova] Releasing lock "refresh_cache-8fc36ed9-9315-4bdb-b4f3-248106a3c681" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2564.104439] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690887, 'name': CreateVM_Task, 'duration_secs': 0.285518} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2564.104623] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2564.105334] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2564.105502] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2564.105859] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2564.106127] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fa61d5e-40cc-4eef-8a7f-3ef853e01326 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2564.110788] env[61663]: DEBUG oslo_vmware.api [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Waiting for the task: (returnval){ [ 2564.110788] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52409478-629c-e966-ff48-f66dd8b95195" [ 2564.110788] env[61663]: _type = "Task" [ 2564.110788] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2564.118276] env[61663]: DEBUG oslo_vmware.api [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52409478-629c-e966-ff48-f66dd8b95195, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2564.621451] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2564.621838] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2564.621902] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2566.688375] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2568.691993] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2568.692329] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2568.692392] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2568.717025] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.717201] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.717335] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.717465] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.717590] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.717710] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.717830] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.717948] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.718080] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.718202] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2568.718323] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
[ 2568.718824] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2569.109993] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2571.692717] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2571.693179] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2572.692396] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2572.704254] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2572.704626] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2572.704626] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2572.704802] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2572.705892] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911fb96f-f4d3-4b10-b8d4-6778efcd8184 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2572.714614] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97fdb41b-8aa5-4825-bb61-57259a227d7a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
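
The Acquiring/acquired/"released" triplets above, with their waited/held timings, are produced by oslo.concurrency's lockutils wrapper. A hedged sketch of the pattern (assuming only that oslo.concurrency is installed; the function body is a placeholder, not the resource tracker's real code):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def clean_compute_node_cache():
    # While the in-process lock "compute_resources" is held, lockutils
    # logs how long the caller waited for it and, on exit, how long it
    # was held (both 0.000s in the records above).
    pass

clean_compute_node_cache()
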
[ 2572.728048] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144d8b9d-9d24-42bf-bcc6-78073d90937f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2572.734367] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0792e98a-a0c1-44f9-a3d3-97f7671859c4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2572.763624] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181288MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2572.763750] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2572.763950] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2572.836852] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.837025] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.837164] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.837287] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.837416] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.837532] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.837649] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.837765] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.837881] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.837996] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2572.848513] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2572.858550] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 911c036c-c7d8-4ff7-b874-335361fb5281 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
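
Two outcomes are visible in the allocation audit above: allocations for instances actively managed on this host are kept, and allocations for instances that were scheduled here but have yet to start are left alone ("Skipping heal"). A simplified triage sketch (the dict shapes follow the log; the function itself is an illustration, not resource_tracker code):

def triage_allocations(allocations, managed, scheduled):
    # allocations is keyed by instance UUID, values shaped like the
    # logged {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
    keep, heal_skipped, stale = {}, {}, {}
    for uuid, alloc in allocations.items():
        if uuid in managed:
            keep[uuid] = alloc          # "actively managed on this compute host"
        elif uuid in scheduled:
            heal_skipped[uuid] = alloc  # "has yet to start. Skipping heal"
        else:
            stale[uuid] = alloc         # would be removed from placement
    return keep, heal_skipped, stale

allocs = {"8fc36ed9-9315-4bdb-b4f3-248106a3c681":
              {"resources": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}},
          "0266b3f5-ee31-46d7-af5e-844a27bfd829":
              {"resources": {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}}}
print(triage_allocations(allocs,
                         managed={"8fc36ed9-9315-4bdb-b4f3-248106a3c681"},
                         scheduled={"0266b3f5-ee31-46d7-af5e-844a27bfd829"}))
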
[ 2572.867349] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance bcc3a109-50ca-4a22-90f3-609231a3e95f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2572.867563] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2572.867710] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2573.010040] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9badb0-db10-4c5d-b24c-79ca623bd088 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2573.017200] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a878a4c-ecf8-495e-b9e6-d4d4e3f2d301 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2573.045414] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d398db5-0db3-4d52-9a49-c9577dbf9298 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2573.052046] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2067c9-0688-4cd5-af14-494b0bfac9bb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2573.065701] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2573.073542] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2573.085776] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
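
The usage numbers in the "Final resource view" record follow directly from the ten tracked instances (128 MB RAM, 1 GB disk, 1 vCPU each) plus the 512 MB of reserved memory in the inventory. A quick check of that arithmetic:

instances = 10                     # instances listed by the tracker above
used_ram = 512 + instances * 128   # reserved MB + per-instance MEMORY_MB
used_disk = instances * 1          # per-instance DISK_GB
used_vcpus = instances * 1         # per-instance VCPU
assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)  # as logged
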
[ 2573.085955] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.322s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2578.086067] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2578.086398] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 2606.643521] env[61663]: WARNING oslo_vmware.rw_handles [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles     response.begin()
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2606.643521] env[61663]: ERROR oslo_vmware.rw_handles
[ 2606.644395] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/2ad610ef-0826-48bb-90e8-a29d557257c1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2606.645846] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2606.646109] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Copying Virtual Disk [datastore1] vmware_temp/2ad610ef-0826-48bb-90e8-a29d557257c1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/2ad610ef-0826-48bb-90e8-a29d557257c1/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
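
The WARNING above comes from rw_handles.close() calling getresponse() on its raw http.client connection after the upload; if the server has already dropped the socket, http.client raises RemoteDisconnected. A hedged sketch of the same pattern (illustrative, not the oslo.vmware source):

import http.client

def close_write_handle(conn: http.client.HTTPSConnection) -> None:
    try:
        conn.getresponse().read()  # the call the traceback points at
    except http.client.RemoteDisconnected:
        # Logged as a warning rather than an error: the payload was
        # already written, so the transfer itself may still have
        # succeeded -- the next record indeed reports the image as
        # downloaded to the datastore.
        pass
    finally:
        conn.close()
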
[ 2606.646409] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c826e3bc-e51a-478b-a8a3-52ea01645324 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2606.656096] env[61663]: DEBUG oslo_vmware.api [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Waiting for the task: (returnval){
[ 2606.656096] env[61663]:   value = "task-1690888"
[ 2606.656096] env[61663]:   _type = "Task"
[ 2606.656096] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2606.663987] env[61663]: DEBUG oslo_vmware.api [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Task: {'id': task-1690888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2607.166280] env[61663]: DEBUG oslo_vmware.exceptions [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2607.166561] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2607.167212] env[61663]: ERROR nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2607.167212] env[61663]: Faults: ['InvalidArgument']
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Traceback (most recent call last):
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     yield resources
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     self.driver.spawn(context, instance, image_meta,
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     self._fetch_image_if_missing(context, vi)
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     image_cache(vi, tmp_image_ds_loc)
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     vm_util.copy_virtual_disk(
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     session._wait_for_task(vmdk_copy_task)
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     return self.wait_for_task(task_ref)
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     return evt.wait()
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     result = hub.switch()
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     return self.greenlet.switch()
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     self.f(*self.args, **self.kw)
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     raise exceptions.translate_fault(task_info.error)
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Faults: ['InvalidArgument']
[ 2607.167212] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]
[ 2607.168421] env[61663]: INFO nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Terminating instance
[ 2607.169153] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2607.169366] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2607.169610] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-604c0215-d77d-4c23-b8d1-790647a5ebf0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.171654] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "refresh_cache-8e705624-9787-4d34-a3d4-f56b7b4fdcc2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2607.171811] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquired lock "refresh_cache-8e705624-9787-4d34-a3d4-f56b7b4fdcc2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2607.171978] env[61663]: DEBUG nova.network.neutron [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2607.178876] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2607.179486] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
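
The traceback above shows the shape of oslo.vmware task waiting: a looping call polls the CopyVirtualDisk task until it succeeds or errors, and an error is translated into a VimFaultException like the fileType/InvalidArgument one here. A reduced sketch of that control flow (stand-in names, not the library's own code):

import time

class VimFaultException(Exception):
    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list  # e.g. ['InvalidArgument']

def wait_for_task(poll, interval=0.5):
    """poll() returns (state, payload) with state in
    {'running', 'success', 'error'}."""
    while True:
        state, payload = poll()
        if state == "success":
            return payload
        if state == "error":
            # mirrors: raise exceptions.translate_fault(task_info.error)
            raise VimFaultException(["InvalidArgument"], payload)
        time.sleep(interval)
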
[ 2607.180228] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87ef06db-a51d-4e55-bcb0-f9d82edfeafe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.188122] env[61663]: DEBUG oslo_vmware.api [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){
[ 2607.188122] env[61663]:   value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c3aab-2f74-c7d8-6422-6b1f7c574cf0"
[ 2607.188122] env[61663]:   _type = "Task"
[ 2607.188122] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2607.196276] env[61663]: DEBUG oslo_vmware.api [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c3aab-2f74-c7d8-6422-6b1f7c574cf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2607.215351] env[61663]: DEBUG nova.network.neutron [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2607.286423] env[61663]: DEBUG nova.network.neutron [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2607.295545] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Releasing lock "refresh_cache-8e705624-9787-4d34-a3d4-f56b7b4fdcc2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2607.295943] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2607.296158] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2607.297236] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa0daa4-eb34-4765-8c11-06486aca40e8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.305042] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2607.305214] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a6f6633a-81a6-4ba3-ad8b-6b41ea09b21d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.338076] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2607.339499] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2607.339499] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Deleting the datastore file [datastore1] 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2607.339499] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e57b7c0-1611-4c5b-b237-f6e1c2349419 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.345729] env[61663]: DEBUG oslo_vmware.api [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Waiting for the task: (returnval){
[ 2607.345729] env[61663]:   value = "task-1690890"
[ 2607.345729] env[61663]:   _type = "Task"
[ 2607.345729] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2607.352723] env[61663]: DEBUG oslo_vmware.api [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Task: {'id': task-1690890, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
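
The "(returnval){ value = ..., _type = "Task" }" blocks above are vSphere managed-object references: typed handles that the client then polls, not the task results themselves. A minimal stand-in (for illustration only):

from dataclasses import dataclass

@dataclass(frozen=True)
class ManagedObjectReference:
    value: str   # e.g. "task-1690890"
    _type: str   # e.g. "Task"

ref = ManagedObjectReference(value="task-1690890", _type="Task")
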
[ 2607.698202] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2607.698594] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating directory with path [datastore1] vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2607.698794] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-722299d2-dc1c-44dd-aa8d-c654fcac0202 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.710138] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created directory with path [datastore1] vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2607.710334] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Fetch image to [datastore1] vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2607.710503] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2607.711246] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c538a5fb-efc7-4313-a01a-c72f80960ad8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.718985] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea48220f-ecaa-474f-b4ac-ae9b7ea009cf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.727813] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce4c3bc-5623-4b93-8d22-aae878b303ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.759232] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6acb2dea-df94-480c-82f4-e8ca356a13e4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.765817] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eb91adfa-be8a-46d2-b47d-27d573119f79 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2607.788258] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2607.841483] env[61663]: DEBUG oslo_vmware.rw_handles [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2607.905141] env[61663]: DEBUG oslo_vmware.rw_handles [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2607.905331] env[61663]: DEBUG oslo_vmware.rw_handles [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2607.909235] env[61663]: DEBUG oslo_vmware.api [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Task: {'id': task-1690890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.042359} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
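
The write handle above is a plain HTTPS upload against the ESX host's /folder endpoint, addressing the file by datastore path plus dcPath/dsName query parameters. A sketch that reconstructs the logged URL (the helper name is ours; service-ticket and auth handling are omitted):

from urllib.parse import quote, urlencode

def datastore_upload_url(host: str, ds_path: str, dc_path: str, ds_name: str) -> str:
    query = urlencode({"dcPath": dc_path, "dsName": ds_name})
    return f"https://{host}:443/folder/{quote(ds_path)}?{query}"

print(datastore_upload_url(
    "esx7c2n1.openstack.eu-de-1.cloud.sap",
    "vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/"
    "362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk",
    "ha-datacenter", "datastore1"))
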
[ 2607.909469] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2607.909649] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2607.909823] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2607.909995] env[61663]: INFO nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Took 0.61 seconds to destroy the instance on the hypervisor.
[ 2607.910238] env[61663]: DEBUG oslo.service.loopingcall [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2607.910434] env[61663]: DEBUG nova.compute.manager [-] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Skipping network deallocation for instance since networking was not requested. {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}}
[ 2607.912636] env[61663]: DEBUG nova.compute.claims [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2607.912816] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2607.913046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2608.097265] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8bec30-3cfc-4879-9e20-90c814708260 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2608.104840] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3296d7-2551-450b-85c3-e5749bcae8d5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2608.134950] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e2702b-5462-4214-ad2a-acd7ba314b00 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2608.141513] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a9368b-99f8-4623-81c9-a85ad80632e8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2608.154184] env[61663]: DEBUG nova.compute.provider_tree [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2608.162359] env[61663]: DEBUG nova.scheduler.client.report [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2608.174988] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.262s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
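
"Inventory has not changed" is, in effect, a dict comparison: the freshly computed inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 equals what the local cache already holds, so no update needs to be sent to placement. A sketch of that comparison using the logged values:

new_inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 183,
                "step_size": 1, "allocation_ratio": 1.0},
}
cached_inventory = dict(new_inventory)  # what the provider tree holds
if new_inventory == cached_inventory:
    print("Inventory has not changed; skipping placement update")
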
[ 2608.175507] env[61663]: ERROR nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2608.175507] env[61663]: Faults: ['InvalidArgument']
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Traceback (most recent call last):
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     self.driver.spawn(context, instance, image_meta,
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     self._fetch_image_if_missing(context, vi)
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     image_cache(vi, tmp_image_ds_loc)
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     vm_util.copy_virtual_disk(
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     session._wait_for_task(vmdk_copy_task)
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     return self.wait_for_task(task_ref)
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     return evt.wait()
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     result = hub.switch()
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     return self.greenlet.switch()
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     self.f(*self.args, **self.kw)
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]     raise exceptions.translate_fault(task_info.error)
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Faults: ['InvalidArgument']
[ 2608.175507] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2]
[ 2608.176499] env[61663]: DEBUG nova.compute.utils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2608.177508] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Build of instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 was re-scheduled: A specified parameter was not correct: fileType
[ 2608.177508] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2608.177868] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2608.178122] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "refresh_cache-8e705624-9787-4d34-a3d4-f56b7b4fdcc2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2608.178259] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquired lock "refresh_cache-8e705624-9787-4d34-a3d4-f56b7b4fdcc2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2608.178419] env[61663]: DEBUG nova.network.neutron [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2608.203866] env[61663]: DEBUG nova.network.neutron [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2608.263675] env[61663]: DEBUG nova.network.neutron [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2608.273979] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Releasing lock "refresh_cache-8e705624-9787-4d34-a3d4-f56b7b4fdcc2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2608.274222] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2608.274404] env[61663]: DEBUG nova.compute.manager [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Skipping network deallocation for instance since networking was not requested. {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}}
[ 2608.379315] env[61663]: INFO nova.scheduler.client.report [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Deleted allocations for instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2
[ 2608.398020] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b8986cad-65a2-4732-a51f-39e8b40fcc6d tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.260s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2608.399162] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.329s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2608.399383] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2608.399589] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2608.399757] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2608.401512] env[61663]: INFO nova.compute.manager [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Terminating instance
[ 2608.403350] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquiring lock "refresh_cache-8e705624-9787-4d34-a3d4-f56b7b4fdcc2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2608.403477] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Acquired lock "refresh_cache-8e705624-9787-4d34-a3d4-f56b7b4fdcc2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2608.403637] env[61663]: DEBUG nova.network.neutron [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2608.412159] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 2608.429573] env[61663]: DEBUG nova.network.neutron [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2608.463414] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2608.463676] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2608.465551] env[61663]: INFO nova.compute.claims [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2608.528707] env[61663]: DEBUG nova.network.neutron [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2608.538052] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Releasing lock "refresh_cache-8e705624-9787-4d34-a3d4-f56b7b4fdcc2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2608.538341] env[61663]: DEBUG nova.compute.manager [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2608.538540] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2608.539070] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f22cefb-fc5d-463b-876b-42f510f771cd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.549459] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88d9096-e217-4752-99c3-a319f1d233cf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.581295] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8e705624-9787-4d34-a3d4-f56b7b4fdcc2 could not be found. [ 2608.581514] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2608.581738] env[61663]: INFO nova.compute.manager [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2608.582014] env[61663]: DEBUG oslo.service.loopingcall [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2608.584419] env[61663]: DEBUG nova.compute.manager [-] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2608.584517] env[61663]: DEBUG nova.network.neutron [-] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2608.680878] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87a9fb50-3f25-4297-be5d-635e75beb502 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.688373] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960e8027-e75d-4766-92d2-ea9f2e24056a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.717275] env[61663]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61663) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2608.717595] env[61663]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 2608.718142] env[61663]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-e4c9bd34-2726-43ea-b5e1-420157ebae72'] [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2608.718142] env[61663]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2608.718142] env[61663]: ERROR oslo.service.loopingcall [ 2608.719796] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27ef969-89c4-4818-860c-23809678c6db {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.721430] env[61663]: ERROR nova.compute.manager [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
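The traceback above bottoms out in the wrapper at nova/network/neutron.py:212, which converts neutronclient's Unauthorized (HTTP 401) into NeutronAdminCredentialConfigurationInvalid when the failing request was made with the admin credentials from nova.conf. A minimal sketch of that translation pattern, simplified from what the traceback shows (the real wrapper also handles requests made with per-user tokens; the exception class here is a stand-in for nova.exception's):

    import functools

    from neutronclient.common import exceptions as neutron_client_exc

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""

    def translate_unauthorized(func, admin=True):
        # Wrap a neutronclient call so an HTTP 401 on an admin-token request
        # surfaces as an operator configuration error, not a raw client error.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except neutron_client_exc.Unauthorized:
                if admin:
                    # The token minted from the nova.conf [neutron] credentials
                    # was rejected, as in the 401 traceback above.
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise
        return wrapper

Under this scheme every proxied client method (list_ports in the traceback) passes through the same wrapper, which is why the 401 repeats identically for each retry of _deallocate_network_with_retries.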
[ 2608.728819] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6736a3fc-4d6a-4ce9-8acb-32630c80650b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.741035] env[61663]: DEBUG nova.compute.provider_tree [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2608.749122] env[61663]: DEBUG nova.scheduler.client.report [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2608.757067] env[61663]: ERROR nova.compute.manager [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Traceback (most recent call last): [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] ret = obj(*args, **kwargs) [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] exception_handler_v20(status_code, error_body) [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] raise client_exc(message=error_message, [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Neutron server returns request_ids: ['req-e4c9bd34-2726-43ea-b5e1-420157ebae72'] [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] During handling of the above exception, another exception occurred: [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Traceback (most recent call last): [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] self._delete_instance(context, instance, bdms) [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] self._shutdown_instance(context, instance, bdms) [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] self._try_deallocate_network(context, instance, requested_networks) [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] with excutils.save_and_reraise_exception(): [ 2608.757067] env[61663]: ERROR 
nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] self.force_reraise() [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] raise self.value [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] _deallocate_network_with_retries() [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] return evt.wait() [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] result = hub.switch() [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] return self.greenlet.switch() [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] result = func(*self.args, **self.kw) [ 2608.757067] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] result = f(*args, **kwargs) [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] self._deallocate_network( [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] self.network_api.deallocate_for_instance( [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 
8e705624-9787-4d34-a3d4-f56b7b4fdcc2] data = neutron.list_ports(**search_opts) [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] ret = obj(*args, **kwargs) [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] return self.list('ports', self.ports_path, retrieve_all, [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] ret = obj(*args, **kwargs) [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] for r in self._pagination(collection, path, **params): [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] res = self.get(path, params=params) [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] ret = obj(*args, **kwargs) [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] return self.retry_request("GET", action, body=body, [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] ret = obj(*args, **kwargs) [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] return self.do_request(method, action, body=body, [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] ret = obj(*args, **kwargs) [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] self._handle_fault_response(status_code, replybody, resp) [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2608.758430] env[61663]: ERROR nova.compute.manager [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] [ 2608.765823] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2608.766363] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2608.782017] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.383s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2608.783082] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 192.697s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2608.783275] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2608.783454] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "8e705624-9787-4d34-a3d4-f56b7b4fdcc2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2608.812245] env[61663]: DEBUG nova.compute.utils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2608.813496] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2608.813670] env[61663]: DEBUG nova.network.neutron [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2608.826036] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2608.842356] env[61663]: INFO nova.compute.manager [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] [instance: 8e705624-9787-4d34-a3d4-f56b7b4fdcc2] Successfully reverted task state from None on failure for instance. [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server [None req-8b9561a1-1d76-4dc0-973a-225667f48d95 tempest-ServerShowV247Test-853330159 tempest-ServerShowV247Test-853330159-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-e4c9bd34-2726-43ea-b5e1-420157ebae72'] [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 2608.848345] env[61663]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server raise self.value [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.850109] env[61663]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2608.850109] env[61663]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2608.851881] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2608.851881] env[61663]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2608.851881] env[61663]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2608.851881] env[61663]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2608.851881] env[61663]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2608.851881] env[61663]: ERROR oslo_messaging.rpc.server [ 2608.870071] env[61663]: DEBUG nova.policy [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '033e5ebd18fb421b8ad3f4ad5033f1b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7bb1bdc9b1004ff591ab4e001d81b400', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2608.888793] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2608.914774] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2608.915051] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2608.915258] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2608.915451] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2608.915602] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2608.915789] env[61663]: DEBUG 
nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2608.916036] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2608.916238] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2608.916414] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2608.916607] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2608.916794] env[61663]: DEBUG nova.virt.hardware [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2608.917642] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d4287d-e417-4f8e-ab5c-fd4ea407965f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2608.925767] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41bf40b-72de-4f06-bdcc-b96d6ef7e0f8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2609.255869] env[61663]: DEBUG nova.network.neutron [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Successfully created port: d52ed9bb-0c2a-4984-a221-80c5ad81205f {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2610.027325] env[61663]: DEBUG nova.compute.manager [req-7c5985d9-1fd4-4a4b-8cc3-5a544bfcfbde req-16e23e90-29be-437d-865a-09ee3fec2560 service nova] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Received event network-vif-plugged-d52ed9bb-0c2a-4984-a221-80c5ad81205f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2610.027325] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c5985d9-1fd4-4a4b-8cc3-5a544bfcfbde 
req-16e23e90-29be-437d-865a-09ee3fec2560 service nova] Acquiring lock "0266b3f5-ee31-46d7-af5e-844a27bfd829-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2610.027325] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c5985d9-1fd4-4a4b-8cc3-5a544bfcfbde req-16e23e90-29be-437d-865a-09ee3fec2560 service nova] Lock "0266b3f5-ee31-46d7-af5e-844a27bfd829-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2610.027325] env[61663]: DEBUG oslo_concurrency.lockutils [req-7c5985d9-1fd4-4a4b-8cc3-5a544bfcfbde req-16e23e90-29be-437d-865a-09ee3fec2560 service nova] Lock "0266b3f5-ee31-46d7-af5e-844a27bfd829-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2610.027325] env[61663]: DEBUG nova.compute.manager [req-7c5985d9-1fd4-4a4b-8cc3-5a544bfcfbde req-16e23e90-29be-437d-865a-09ee3fec2560 service nova] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] No waiting events found dispatching network-vif-plugged-d52ed9bb-0c2a-4984-a221-80c5ad81205f {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2610.028018] env[61663]: WARNING nova.compute.manager [req-7c5985d9-1fd4-4a4b-8cc3-5a544bfcfbde req-16e23e90-29be-437d-865a-09ee3fec2560 service nova] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Received unexpected event network-vif-plugged-d52ed9bb-0c2a-4984-a221-80c5ad81205f for instance with vm_state building and task_state spawning.
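The "No waiting events found" / "Received unexpected event" pair above comes from Nova's external-event registry: the compute manager normally registers an expected network-vif-plugged event before plugging the VIF, and Neutron's delivery through the external event API then wakes the waiter; here the event arrived before any waiter was registered, so it is logged and dropped. A simplified sketch of such a prepare-then-deliver registry (Nova's real InstanceEvents is eventlet-based and lock-protected; threading.Event stands in here):

    import threading

    class InstanceEvents:
        """Toy registry mirroring prepare-then-deliver event handling."""

        def __init__(self):
            self._events = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # Called before the action that triggers the event (e.g. before
            # plugging a VIF), so a later delivery finds a waiter.
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev

        def deliver(self, instance_uuid, event_name):
            ev = self._events.pop((instance_uuid, event_name), None)
            if ev is None:
                # No waiter registered yet: the "No waiting events found" /
                # "Received unexpected event" case in the log above.
                print('unexpected event %s for %s' % (event_name, instance_uuid))
                return
            ev.set()  # wake whoever is blocked in ev.wait()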
[ 2610.181516] env[61663]: DEBUG nova.network.neutron [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Successfully updated port: d52ed9bb-0c2a-4984-a221-80c5ad81205f {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2610.197466] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "refresh_cache-0266b3f5-ee31-46d7-af5e-844a27bfd829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2610.198206] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "refresh_cache-0266b3f5-ee31-46d7-af5e-844a27bfd829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2610.199703] env[61663]: DEBUG nova.network.neutron [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2610.241944] env[61663]: DEBUG nova.network.neutron [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2610.434047] env[61663]: DEBUG nova.network.neutron [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Updating instance_info_cache with network_info: [{"id": "d52ed9bb-0c2a-4984-a221-80c5ad81205f", "address": "fa:16:3e:e9:84:3f", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd52ed9bb-0c", "ovs_interfaceid": "d52ed9bb-0c2a-4984-a221-80c5ad81205f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2610.449263] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "refresh_cache-0266b3f5-ee31-46d7-af5e-844a27bfd829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2610.449571] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Instance network_info: |[{"id": "d52ed9bb-0c2a-4984-a221-80c5ad81205f", "address": "fa:16:3e:e9:84:3f", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd52ed9bb-0c", "ovs_interfaceid": "d52ed9bb-0c2a-4984-a221-80c5ad81205f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2610.449979] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:84:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd52ed9bb-0c2a-4984-a221-80c5ad81205f', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2610.458014] env[61663]: DEBUG oslo.service.loopingcall [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2610.458504] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2610.458736] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6ce7c55-2142-481e-86ba-3dd3cf2e82f7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.479424] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2610.479424] env[61663]: value = "task-1690891" [ 2610.479424] env[61663]: _type = "Task" [ 2610.479424] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2610.487524] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690891, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2610.989754] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690891, 'name': CreateVM_Task, 'duration_secs': 0.301595} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2610.989866] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2610.990588] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2610.990760] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2610.991097] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2610.991349] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b607b76-d0e8-41c4-b501-313a71797dfa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2610.995529] env[61663]: DEBUG oslo_vmware.api [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 2610.995529] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ecffbf-8c2e-d08b-746a-3f910a192f23" [ 2610.995529] env[61663]: _type = "Task" [ 2610.995529] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2611.004183] env[61663]: DEBUG oslo_vmware.api [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ecffbf-8c2e-d08b-746a-3f910a192f23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2611.506183] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2611.506559] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2611.506654] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2612.050020] env[61663]: DEBUG nova.compute.manager [req-a01bd683-e9f0-4808-8215-58daf8b34855 req-b0a5063b-47b8-4097-a373-682b99a92bfe service nova] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Received event network-changed-d52ed9bb-0c2a-4984-a221-80c5ad81205f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2612.050231] env[61663]: DEBUG nova.compute.manager [req-a01bd683-e9f0-4808-8215-58daf8b34855 req-b0a5063b-47b8-4097-a373-682b99a92bfe service nova] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Refreshing instance network info cache due to event network-changed-d52ed9bb-0c2a-4984-a221-80c5ad81205f. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2612.050462] env[61663]: DEBUG oslo_concurrency.lockutils [req-a01bd683-e9f0-4808-8215-58daf8b34855 req-b0a5063b-47b8-4097-a373-682b99a92bfe service nova] Acquiring lock "refresh_cache-0266b3f5-ee31-46d7-af5e-844a27bfd829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2612.050614] env[61663]: DEBUG oslo_concurrency.lockutils [req-a01bd683-e9f0-4808-8215-58daf8b34855 req-b0a5063b-47b8-4097-a373-682b99a92bfe service nova] Acquired lock "refresh_cache-0266b3f5-ee31-46d7-af5e-844a27bfd829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2612.050778] env[61663]: DEBUG nova.network.neutron [req-a01bd683-e9f0-4808-8215-58daf8b34855 req-b0a5063b-47b8-4097-a373-682b99a92bfe service nova] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Refreshing network info cache for port d52ed9bb-0c2a-4984-a221-80c5ad81205f {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2612.333238] env[61663]: DEBUG nova.network.neutron [req-a01bd683-e9f0-4808-8215-58daf8b34855 req-b0a5063b-47b8-4097-a373-682b99a92bfe service nova] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Updated VIF entry in instance network info cache for port d52ed9bb-0c2a-4984-a221-80c5ad81205f. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2612.333586] env[61663]: DEBUG nova.network.neutron [req-a01bd683-e9f0-4808-8215-58daf8b34855 req-b0a5063b-47b8-4097-a373-682b99a92bfe service nova] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Updating instance_info_cache with network_info: [{"id": "d52ed9bb-0c2a-4984-a221-80c5ad81205f", "address": "fa:16:3e:e9:84:3f", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd52ed9bb-0c", "ovs_interfaceid": "d52ed9bb-0c2a-4984-a221-80c5ad81205f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2612.343496] env[61663]: DEBUG oslo_concurrency.lockutils [req-a01bd683-e9f0-4808-8215-58daf8b34855 req-b0a5063b-47b8-4097-a373-682b99a92bfe service nova] Releasing lock "refresh_cache-0266b3f5-ee31-46d7-af5e-844a27bfd829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2620.692600] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2622.692274] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2628.687575] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2629.692593] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2629.692945] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2629.692945] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2629.715268] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.715468] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.715568] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.715704] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.715807] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.715905] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.716018] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.716301] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.716428] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.716544] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2629.716653] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2629.717177] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2631.693323] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2632.692112] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2632.703473] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2632.703876] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2632.703876] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2632.704019] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2632.705220] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c428ce-36e3-4a2d-b269-0f2dba169a86 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.714122] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a7a60a-ba6e-463c-b503-435a7d243eef {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.727526] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2aad14c-b188-4e1d-a217-107c6c7c5b6d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.733522] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23abbb69-fa82-4413-bf55-c8666d74065b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.762774] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181303MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2632.762960] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2632.763147] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2632.833690] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance e47c9821-f815-4bd5-bf00-8822f08e3333 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.833853] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.833982] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.834121] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.834243] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.834360] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.834477] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.834594] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.834710] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.834824] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2632.846850] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 911c036c-c7d8-4ff7-b874-335361fb5281 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2632.857159] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance bcc3a109-50ca-4a22-90f3-609231a3e95f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2632.857386] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2632.857534] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2632.991418] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624dfc45-616a-4085-b13c-f90693adece5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2632.999185] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ad0c5d-1c0c-4302-a876-7a1b4b45701e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.027783] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f92cb5c-c8c4-42ed-bd0a-9fb9bfd434a5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.034818] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee975a7a-ceb3-45e7-afc8-5d03925e1bf2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2633.048300] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2633.057343] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2633.070726] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2633.070920] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.308s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2633.146113] env[61663]: DEBUG oslo_concurrency.lockutils [None 
req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "0266b3f5-ee31-46d7-af5e-844a27bfd829" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2634.071706] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2637.692160] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2637.692452] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2644.688579] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2656.661433] env[61663]: WARNING oslo_vmware.rw_handles [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2656.661433] env[61663]: ERROR oslo_vmware.rw_handles [ 2656.662101] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2656.664055] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2656.664328] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Copying Virtual Disk [datastore1] vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/980ada7f-7861-41bd-b827-5372a66eede3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2656.664622] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6bd5bae-955c-47f8-8836-cd3fa19dc171 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2656.673462] env[61663]: DEBUG oslo_vmware.api [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 2656.673462] env[61663]: value = "task-1690892" [ 2656.673462] env[61663]: _type = "Task" [ 2656.673462] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2656.681132] env[61663]: DEBUG oslo_vmware.api [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690892, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2657.184018] env[61663]: DEBUG oslo_vmware.exceptions [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2657.184310] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2657.184873] env[61663]: ERROR nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2657.184873] env[61663]: Faults: ['InvalidArgument'] [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Traceback (most recent call last): [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] yield resources [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] self.driver.spawn(context, instance, image_meta, [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] self._fetch_image_if_missing(context, vi) [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] image_cache(vi, tmp_image_ds_loc) [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] vm_util.copy_virtual_disk( [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] session._wait_for_task(vmdk_copy_task) [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] return self.wait_for_task(task_ref) [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] return evt.wait() [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] result = hub.switch() [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] return self.greenlet.switch() [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] self.f(*self.args, **self.kw) [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] raise exceptions.translate_fault(task_info.error) [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Faults: ['InvalidArgument'] [ 2657.184873] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] [ 2657.186141] env[61663]: INFO nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Terminating instance [ 2657.186771] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2657.186977] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2657.187684] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94ff13ab-4f8d-4fb3-a0f9-90aa18ea4e8f {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.189517] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2657.189713] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2657.190420] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d433513-a451-4480-ab19-ee5e5af32c30 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.196977] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2657.197199] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-644c897e-89bc-4cd5-9086-5cb5035ab921 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.200045] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2657.200045] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2657.201087] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b499b9e2-74a4-4026-925d-ee17d19df956 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.205313] env[61663]: DEBUG oslo_vmware.api [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Waiting for the task: (returnval){ [ 2657.205313] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526a0021-5b57-6dff-0bbf-1550f9bbcd21" [ 2657.205313] env[61663]: _type = "Task" [ 2657.205313] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2657.212159] env[61663]: DEBUG oslo_vmware.api [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526a0021-5b57-6dff-0bbf-1550f9bbcd21, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2657.634888] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2657.635058] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2657.635234] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleting the datastore file [datastore1] e47c9821-f815-4bd5-bf00-8822f08e3333 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2657.635496] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-876724a7-7a44-41cf-b3e2-942f66389f44 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.642206] env[61663]: DEBUG oslo_vmware.api [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 2657.642206] env[61663]: value = "task-1690894" [ 2657.642206] env[61663]: _type = "Task" [ 2657.642206] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2657.649490] env[61663]: DEBUG oslo_vmware.api [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690894, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2657.714788] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2657.715137] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Creating directory with path [datastore1] vmware_temp/99fa93e0-be10-496c-8941-173a5122fa26/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2657.715271] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8db51ce4-e235-4efd-8020-2cfea7cc8bff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.734819] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Created directory with path [datastore1] vmware_temp/99fa93e0-be10-496c-8941-173a5122fa26/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2657.735044] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Fetch image to [datastore1] vmware_temp/99fa93e0-be10-496c-8941-173a5122fa26/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2657.735224] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/99fa93e0-be10-496c-8941-173a5122fa26/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2657.736015] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936d5015-6628-4548-a506-132f79da93c0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.743836] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1929c4-cfa9-4535-9e57-75c6660c7462 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.752546] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd56ba28-51c0-4522-bb33-7f0c8a281f85 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.782075] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1238e1d-de38-42ac-a08b-5d9e60356de0 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.787287] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e2db3215-13e9-4de4-95b8-7c1dfef0566b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2657.808053] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2657.854925] env[61663]: DEBUG oslo_vmware.rw_handles [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/99fa93e0-be10-496c-8941-173a5122fa26/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2657.916140] env[61663]: DEBUG oslo_vmware.rw_handles [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2657.916349] env[61663]: DEBUG oslo_vmware.rw_handles [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/99fa93e0-be10-496c-8941-173a5122fa26/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2658.151560] env[61663]: DEBUG oslo_vmware.api [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091743} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2658.151803] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2658.151986] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2658.152213] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2658.152393] env[61663]: INFO nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Took 0.96 seconds to destroy the instance on the hypervisor. [ 2658.154671] env[61663]: DEBUG nova.compute.claims [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2658.154845] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2658.155077] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2658.341364] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1654b629-b9dd-4f65-8100-176bdd5bcb76 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.348837] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1526c50-cfe8-4652-9073-420cc1ddb013 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.378496] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35147fd7-92ce-40fa-aef2-207329586678 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.384925] env[61663]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16b79f9-2439-4837-a716-e48c6cd2e391 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2658.397415] env[61663]: DEBUG nova.compute.provider_tree [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2658.406429] env[61663]: DEBUG nova.scheduler.client.report [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2658.422372] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.267s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2658.422893] env[61663]: ERROR nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2658.422893] env[61663]: Faults: ['InvalidArgument']
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Traceback (most recent call last):
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     self.driver.spawn(context, instance, image_meta,
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     self._fetch_image_if_missing(context, vi)
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     image_cache(vi, tmp_image_ds_loc)
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     vm_util.copy_virtual_disk(
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     session._wait_for_task(vmdk_copy_task)
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     return self.wait_for_task(task_ref)
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     return evt.wait()
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     result = hub.switch()
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     return self.greenlet.switch()
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     self.f(*self.args, **self.kw)
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]     raise exceptions.translate_fault(task_info.error)
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Faults: ['InvalidArgument']
[ 2658.422893] env[61663]: ERROR nova.compute.manager [instance: e47c9821-f815-4bd5-bf00-8822f08e3333]
[ 2658.423994] env[61663]: DEBUG nova.compute.utils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2658.424994] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Build of instance e47c9821-f815-4bd5-bf00-8822f08e3333 was re-scheduled: A specified parameter was not correct: fileType
[ 2658.424994] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2658.425384] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2658.425560] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2658.425732] env[61663]: DEBUG nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2658.425895] env[61663]: DEBUG nova.network.neutron [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2658.725966] env[61663]: DEBUG nova.network.neutron [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2658.737312] env[61663]: INFO nova.compute.manager [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Took 0.31 seconds to deallocate network for instance.
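The traceback above bottoms out in oslo.vmware's _poll_task, which translates the vCenter task error into a VimFaultException before Nova aborts the claim and reschedules the build; the 'fileType' complaint originates server-side in the CopyVirtualDisk_Task spec, and the poller only wraps whatever fault vCenter attached to the task. A minimal sketch of how a caller sees such a fault, assuming a reachable vCenter (VC_HOST/VC_USER/VC_PASS are hypothetical placeholders, and disk_copy_task stands in for the CopyVirtualDisk_Task reference seen in the stack):

    from oslo_vmware import api, exceptions

    # Hypothetical connection details, not taken from this log.
    session = api.VMwareAPISession(
        'VC_HOST', 'VC_USER', 'VC_PASS',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder: a real task reference would come from a call such as
    # session.invoke_api(session.vim, 'CopyVirtualDisk_Task', ...).
    disk_copy_task = None
    try:
        if disk_copy_task is not None:
            # wait_for_task polls the task server-side and raises on a
            # task-level fault; this is the call that surfaced the
            # InvalidArgument fault in the traceback above.
            session.wait_for_task(disk_copy_task)
    except exceptions.VimFaultException as exc:
        # exc.fault_list mirrors the "Faults: ['InvalidArgument']" lines.
        print(exc.fault_list, exc)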
[ 2658.832548] env[61663]: INFO nova.scheduler.client.report [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleted allocations for instance e47c9821-f815-4bd5-bf00-8822f08e3333 [ 2658.857045] env[61663]: DEBUG oslo_concurrency.lockutils [None req-74fcc550-ccb4-438e-9819-30f9bf112e04 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 615.092s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2658.858231] env[61663]: DEBUG oslo_concurrency.lockutils [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 419.143s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2658.860986] env[61663]: DEBUG oslo_concurrency.lockutils [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "e47c9821-f815-4bd5-bf00-8822f08e3333-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2658.860986] env[61663]: DEBUG oslo_concurrency.lockutils [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2658.860986] env[61663]: DEBUG oslo_concurrency.lockutils [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2658.860986] env[61663]: INFO nova.compute.manager [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Terminating instance [ 2658.862858] env[61663]: DEBUG nova.compute.manager [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2658.862858] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2658.863219] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb09e8c8-aadf-4489-b4f7-98634b99cba8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.873443] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a41570-991b-4709-a524-925670e66d8f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2658.885481] env[61663]: DEBUG nova.compute.manager [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2658.906659] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e47c9821-f815-4bd5-bf00-8822f08e3333 could not be found. [ 2658.906876] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2658.907069] env[61663]: INFO nova.compute.manager [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2658.907321] env[61663]: DEBUG oslo.service.loopingcall [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2658.907548] env[61663]: DEBUG nova.compute.manager [-] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2658.907644] env[61663]: DEBUG nova.network.neutron [-] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2658.938214] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2658.938484] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2658.939928] env[61663]: INFO nova.compute.claims [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2659.117070] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cf5dda-4479-4c33-9391-e69fcc40eeed {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.126739] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099462b8-ec3e-4ec6-9618-a7d0cea92340 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.130310] env[61663]: DEBUG nova.network.neutron [-] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2659.157936] env[61663]: INFO nova.compute.manager [-] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] Took 0.25 seconds to deallocate network for instance. 
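Every acquire/release pair in these records ('Acquiring lock "compute_resources" ... waited ... held ...') is emitted by oslo.concurrency's lockutils wrapper; instance_claim, abort_instance_claim and the resource-tracker audits all serialize behind the same lock name. A minimal sketch of the pattern (the function body is a stand-in, not the real ResourceTracker code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Critical section: in Nova this is where the resource tracker
        # updates its view of the host before and after a build.
        print('running under the compute_resources lock')

    claim_resources()

The waited/held timings in the log come from the decorator's inner wrapper, which logs around every acquisition, so lock contention on a busy compute host can be read directly off these DEBUG lines.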
[ 2659.158657] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa31df3a-a783-4f23-bea2-93c8eb88c341 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.169303] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2f68c7-e0ed-4880-bb35-8da4f2358b83 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.182561] env[61663]: DEBUG nova.compute.provider_tree [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2659.191387] env[61663]: DEBUG nova.scheduler.client.report [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2659.207342] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.269s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2659.207823] env[61663]: DEBUG nova.compute.manager [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2659.245031] env[61663]: DEBUG nova.compute.utils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2659.246306] env[61663]: DEBUG nova.compute.manager [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Allocating IP information in the background. 
{{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2659.246947] env[61663]: DEBUG nova.network.neutron [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2659.261654] env[61663]: DEBUG nova.compute.manager [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2659.266821] env[61663]: DEBUG oslo_concurrency.lockutils [None req-917e5edc-ffb3-4313-8a0f-0c464dbc4b35 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.409s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2659.267706] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 243.181s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2659.268392] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: e47c9821-f815-4bd5-bf00-8822f08e3333] During sync_power_state the instance has a pending task (deleting). Skip. [ 2659.268392] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "e47c9821-f815-4bd5-bf00-8822f08e3333" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2659.324731] env[61663]: DEBUG nova.compute.manager [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2659.355111] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2659.355374] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2659.355549] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2659.358010] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2659.358010] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2659.358010] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2659.358010] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2659.358010] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2659.358010] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 
tempest-ServersTestJSON-1545653383-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2659.358010] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2659.358010] env[61663]: DEBUG nova.virt.hardware [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2659.358370] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06f7908-16f8-43d5-865c-9a591e84e131 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.368568] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acd4543-40a8-4272-9060-db1f2147382f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2659.375269] env[61663]: DEBUG nova.policy [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23af862ab660499ab02b71d7cbbe87a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '231acc431e92432795932c50511f2944', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2659.746836] env[61663]: DEBUG nova.network.neutron [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Successfully created port: ef324c97-d282-441d-b565-8172a02f5d7f {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2660.698036] env[61663]: DEBUG nova.network.neutron [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Successfully updated port: ef324c97-d282-441d-b565-8172a02f5d7f {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2660.712020] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "refresh_cache-911c036c-c7d8-4ff7-b874-335361fb5281" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2660.712020] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired lock "refresh_cache-911c036c-c7d8-4ff7-b874-335361fb5281" {{(pid=61663) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2660.712020] env[61663]: DEBUG nova.network.neutron [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2660.776436] env[61663]: DEBUG nova.compute.manager [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Received event network-vif-plugged-ef324c97-d282-441d-b565-8172a02f5d7f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2660.776436] env[61663]: DEBUG oslo_concurrency.lockutils [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] Acquiring lock "911c036c-c7d8-4ff7-b874-335361fb5281-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2660.776436] env[61663]: DEBUG oslo_concurrency.lockutils [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] Lock "911c036c-c7d8-4ff7-b874-335361fb5281-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2660.776436] env[61663]: DEBUG oslo_concurrency.lockutils [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] Lock "911c036c-c7d8-4ff7-b874-335361fb5281-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2660.777481] env[61663]: DEBUG nova.compute.manager [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] No waiting events found dispatching network-vif-plugged-ef324c97-d282-441d-b565-8172a02f5d7f {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2660.777572] env[61663]: WARNING nova.compute.manager [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Received unexpected event network-vif-plugged-ef324c97-d282-441d-b565-8172a02f5d7f for instance with vm_state building and task_state spawning. [ 2660.777870] env[61663]: DEBUG nova.compute.manager [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Received event network-changed-ef324c97-d282-441d-b565-8172a02f5d7f {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2660.777870] env[61663]: DEBUG nova.compute.manager [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Refreshing instance network info cache due to event network-changed-ef324c97-d282-441d-b565-8172a02f5d7f. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2660.778071] env[61663]: DEBUG oslo_concurrency.lockutils [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] Acquiring lock "refresh_cache-911c036c-c7d8-4ff7-b874-335361fb5281" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2660.778957] env[61663]: DEBUG nova.network.neutron [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2660.949473] env[61663]: DEBUG nova.network.neutron [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Updating instance_info_cache with network_info: [{"id": "ef324c97-d282-441d-b565-8172a02f5d7f", "address": "fa:16:3e:b3:10:ca", "network": {"id": "c74991f7-41c9-42d7-9978-5fba7e2b62af", "bridge": "br-int", "label": "tempest-ServersTestJSON-1185630305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "231acc431e92432795932c50511f2944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef324c97-d2", "ovs_interfaceid": "ef324c97-d282-441d-b565-8172a02f5d7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2660.962222] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Releasing lock "refresh_cache-911c036c-c7d8-4ff7-b874-335361fb5281" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2660.962526] env[61663]: DEBUG nova.compute.manager [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Instance network_info: |[{"id": "ef324c97-d282-441d-b565-8172a02f5d7f", "address": "fa:16:3e:b3:10:ca", "network": {"id": "c74991f7-41c9-42d7-9978-5fba7e2b62af", "bridge": "br-int", "label": "tempest-ServersTestJSON-1185630305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"231acc431e92432795932c50511f2944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef324c97-d2", "ovs_interfaceid": "ef324c97-d282-441d-b565-8172a02f5d7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2660.962827] env[61663]: DEBUG oslo_concurrency.lockutils [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] Acquired lock "refresh_cache-911c036c-c7d8-4ff7-b874-335361fb5281" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2660.963015] env[61663]: DEBUG nova.network.neutron [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Refreshing network info cache for port ef324c97-d282-441d-b565-8172a02f5d7f {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2660.964239] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:10:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef324c97-d282-441d-b565-8172a02f5d7f', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2660.979020] env[61663]: DEBUG oslo.service.loopingcall [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2660.979020] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2660.979605] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f93947f0-6410-4865-8251-5cced2c7ed2d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.002662] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2661.002662] env[61663]: value = "task-1690895" [ 2661.002662] env[61663]: _type = "Task" [ 2661.002662] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2661.011859] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690895, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2661.293801] env[61663]: DEBUG nova.network.neutron [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Updated VIF entry in instance network info cache for port ef324c97-d282-441d-b565-8172a02f5d7f. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2661.294197] env[61663]: DEBUG nova.network.neutron [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Updating instance_info_cache with network_info: [{"id": "ef324c97-d282-441d-b565-8172a02f5d7f", "address": "fa:16:3e:b3:10:ca", "network": {"id": "c74991f7-41c9-42d7-9978-5fba7e2b62af", "bridge": "br-int", "label": "tempest-ServersTestJSON-1185630305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "231acc431e92432795932c50511f2944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef324c97-d2", "ovs_interfaceid": "ef324c97-d282-441d-b565-8172a02f5d7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2661.305607] env[61663]: DEBUG oslo_concurrency.lockutils [req-9e41fa9a-2fbb-492f-81dd-5e3ff6446897 req-aba58011-6041-47a4-99e4-5886c2263298 service nova] Releasing lock "refresh_cache-911c036c-c7d8-4ff7-b874-335361fb5281" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2661.513359] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690895, 'name': CreateVM_Task, 'duration_secs': 0.466878} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2661.513359] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2661.513925] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2661.514102] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2661.514448] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2661.514697] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9023dfce-7f86-49c3-8164-a258333da2d8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2661.520944] env[61663]: DEBUG oslo_vmware.api [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for the task: (returnval){ [ 2661.520944] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52588feb-c9b4-b778-4a12-6cb3afc57287" [ 2661.520944] env[61663]: _type = "Task" [ 2661.520944] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2661.530768] env[61663]: DEBUG oslo_vmware.api [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52588feb-c9b4-b778-4a12-6cb3afc57287, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2662.031385] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2662.031849] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2662.031849] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2676.659043] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "b98fdfac-2912-403c-a087-46e8eaf40829" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2676.659334] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "b98fdfac-2912-403c-a087-46e8eaf40829" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2680.692563] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2684.691748] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2686.595040] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4e33cfa1-26b5-4261-b8da-479619392b9e tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "911c036c-c7d8-4ff7-b874-335361fb5281" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2690.687976] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2691.693053] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2691.693053] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2691.693053] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2691.714857] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.715042] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.715179] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.715308] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.715431] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.715556] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.715681] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.715800] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.715919] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.716047] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2691.716172] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2691.716645] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2693.692070] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2693.692070] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2693.704579] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2693.704821] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2693.705039] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2693.705210] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2693.706332] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710c18c8-2b12-486d-bbcf-8589a16bc2c4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2693.715087] env[61663]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e1e769-4b3f-45a5-b6c6-c20529aaf7b5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2693.728706] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5779596e-9d13-4554-aa7e-326a1152ffd6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2693.734698] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8487636e-8f4d-4c78-898e-76134f74053c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2693.764020] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181319MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2693.764187] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2693.764381] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2693.900180] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.900358] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.900493] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.900657] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.900791] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.900915] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.901049] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.901172] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.901289] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.901404] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 911c036c-c7d8-4ff7-b874-335361fb5281 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2693.913236] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance bcc3a109-50ca-4a22-90f3-609231a3e95f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2693.923250] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b98fdfac-2912-403c-a087-46e8eaf40829 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2693.923464] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2693.923652] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2693.938777] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing inventories for resource provider b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2693.951280] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating ProviderTree inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2693.951456] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2693.961470] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing aggregate associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, aggregates: None {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2693.979666] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Refreshing trait associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2694.111742] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e88d818-fa37-40b4-afac-0d258e95a8eb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2694.119354] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c40428ca-3620-4e90-8d27-0910028d7869 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2694.147984] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e2859c-e8ca-43ab-a6d5-279b6e0dbadc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2694.154663] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade2c393-66bf-49b5-bf5f-e7887a13ef63 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2694.167430] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2694.202396] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2694.218359] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2694.218545] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.454s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2694.692150] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2694.692434] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2694.692613] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2694.701855] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] There are 0 instances to clean {{(pid=61663) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2696.692799] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task 
ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2697.700952] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2697.700952] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2702.693609] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2702.693905] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Cleaning up deleted instances with incomplete migration {{(pid=61663) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2705.951025] env[61663]: WARNING oslo_vmware.rw_handles [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2705.951025] env[61663]: ERROR oslo_vmware.rw_handles [ 2705.951025] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/99fa93e0-be10-496c-8941-173a5122fa26/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2705.952816] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] 
Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2705.953033] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Copying Virtual Disk [datastore1] vmware_temp/99fa93e0-be10-496c-8941-173a5122fa26/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/99fa93e0-be10-496c-8941-173a5122fa26/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2705.953464] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d66c3b09-3889-42dc-b7ff-6c5a0d67e673 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2705.961287] env[61663]: DEBUG oslo_vmware.api [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Waiting for the task: (returnval){ [ 2705.961287] env[61663]: value = "task-1690896" [ 2705.961287] env[61663]: _type = "Task" [ 2705.961287] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2705.969418] env[61663]: DEBUG oslo_vmware.api [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Task: {'id': task-1690896, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2706.471754] env[61663]: DEBUG oslo_vmware.exceptions [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2706.472045] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2706.472612] env[61663]: ERROR nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2706.472612] env[61663]: Faults: ['InvalidArgument'] [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Traceback (most recent call last): [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] yield resources [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] self.driver.spawn(context, instance, image_meta, [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] self._fetch_image_if_missing(context, vi) [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] image_cache(vi, tmp_image_ds_loc) [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] vm_util.copy_virtual_disk( [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] session._wait_for_task(vmdk_copy_task) [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] return self.wait_for_task(task_ref) [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] return evt.wait() [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] result = hub.switch() [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] return self.greenlet.switch() [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] self.f(*self.args, **self.kw) [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] raise exceptions.translate_fault(task_info.error) [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Faults: ['InvalidArgument'] [ 2706.472612] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] [ 2706.473650] env[61663]: INFO nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Terminating instance [ 2706.474495] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2706.474706] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2706.474936] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2aa8412-5dea-458d-83f0-f19a2240dcea {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.477234] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2706.477430] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2706.478135] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28d60de-4323-45b7-ba78-f9b5abc6e675 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.484810] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2706.485048] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48774e90-0a59-444c-9ebd-0923b2a0e423 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.487021] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2706.487231] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2706.488155] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6da6475-a23b-4d69-8558-016e61c33ff1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.492882] env[61663]: DEBUG oslo_vmware.api [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Waiting for the task: (returnval){ [ 2706.492882] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524e8c5b-2cc2-c33b-61d3-200e9774b428" [ 2706.492882] env[61663]: _type = "Task" [ 2706.492882] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2706.501350] env[61663]: DEBUG oslo_vmware.api [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524e8c5b-2cc2-c33b-61d3-200e9774b428, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2706.574714] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2706.574935] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2706.575173] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Deleting the datastore file [datastore1] c21a5af5-004b-4544-bcf0-f105d6f336c9 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2706.575440] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74aeef25-e6fd-4791-92a0-18d0ac4af952 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2706.582203] env[61663]: DEBUG oslo_vmware.api [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Waiting for the task: (returnval){ [ 2706.582203] env[61663]: value = "task-1690898" [ 2706.582203] env[61663]: _type = "Task" [ 2706.582203] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2706.589539] env[61663]: DEBUG oslo_vmware.api [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Task: {'id': task-1690898, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2707.003254] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2707.003618] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Creating directory with path [datastore1] vmware_temp/ac431b28-e782-4a16-acc1-1d91ddef4a7f/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2707.003780] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a3c9841-f84e-4d29-aab0-c9a6f705e435 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.014865] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Created directory with path [datastore1] vmware_temp/ac431b28-e782-4a16-acc1-1d91ddef4a7f/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2707.015089] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Fetch image to [datastore1] vmware_temp/ac431b28-e782-4a16-acc1-1d91ddef4a7f/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2707.015282] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/ac431b28-e782-4a16-acc1-1d91ddef4a7f/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2707.015975] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b8c75f-e076-4443-9d27-fb1c18d8e4a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.022463] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4306cc92-230a-4ca1-8dce-f59d26d2989e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.031092] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7522bf43-32f1-4815-90e8-b95ebf0d4fab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.061668] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ae9499-ce11-4e66-8770-f1e2859b7fba 
{{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.067604] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a79f73ed-6128-42ca-82da-7a27012cc09a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.087826] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2707.092716] env[61663]: DEBUG oslo_vmware.api [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Task: {'id': task-1690898, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084368} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2707.092964] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2707.093164] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2707.093340] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2707.093515] env[61663]: INFO nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Took 0.62 seconds to destroy the instance on the hypervisor. 
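Annotation: the "Waiting for the task: (returnval){ ... } to complete." and "Task: {...} progress is 0%." entries above come from oslo.vmware's task polling: wait_for_task re-reads the vCenter task state on an interval until it reaches a terminal state, then returns the result or raises the translated fault (as with the InvalidArgument failure on task-1690896 earlier). A minimal, self-contained sketch of that loop, using a hypothetical get_task_info accessor rather than the real oslo.vmware internals:

    import time

    def wait_for_task(session, task_ref, interval=0.5):
        # Poll until vCenter reports a terminal state for the task.
        while True:
            info = session.get_task_info(task_ref)  # hypothetical accessor
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # oslo.vmware translates the task fault (e.g. InvalidArgument)
                # into a typed exception such as VimFaultException instead
                raise RuntimeError(info.error)
            time.sleep(interval)  # one 'progress is N%.' line per poll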
[ 2707.095618] env[61663]: DEBUG nova.compute.claims [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2707.095800] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2707.096025] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2707.139473] env[61663]: DEBUG oslo_vmware.rw_handles [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ac431b28-e782-4a16-acc1-1d91ddef4a7f/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2707.202483] env[61663]: DEBUG oslo_vmware.rw_handles [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2707.202483] env[61663]: DEBUG oslo_vmware.rw_handles [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ac431b28-e782-4a16-acc1-1d91ddef4a7f/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2707.333530] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0ae8b9-684e-477a-84f8-694e68e8880f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.340839] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39262fc7-52af-4678-821e-49e96ea0d0c2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.370271] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c107d636-9b3e-4384-acb0-cbcdb9360b59 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.377814] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a5dcd0-aef3-4d8b-bbd6-2304131edd68 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2707.390998] env[61663]: DEBUG nova.compute.provider_tree [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2707.403045] env[61663]: DEBUG nova.scheduler.client.report [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2707.415736] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.320s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2707.416284] env[61663]: ERROR nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2707.416284] env[61663]: Faults: ['InvalidArgument'] [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Traceback (most recent call last): [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: 
c21a5af5-004b-4544-bcf0-f105d6f336c9] self.driver.spawn(context, instance, image_meta, [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] self._fetch_image_if_missing(context, vi) [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] image_cache(vi, tmp_image_ds_loc) [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] vm_util.copy_virtual_disk( [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] session._wait_for_task(vmdk_copy_task) [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] return self.wait_for_task(task_ref) [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] return evt.wait() [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] result = hub.switch() [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] return self.greenlet.switch() [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] self.f(*self.args, **self.kw) [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] raise exceptions.translate_fault(task_info.error) [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Faults: ['InvalidArgument'] [ 2707.416284] env[61663]: ERROR nova.compute.manager [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] [ 2707.417096] env[61663]: DEBUG nova.compute.utils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2707.418454] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Build of instance c21a5af5-004b-4544-bcf0-f105d6f336c9 was re-scheduled: A specified parameter was not correct: fileType [ 2707.418454] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2707.418823] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2707.418998] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2707.419186] env[61663]: DEBUG nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2707.419351] env[61663]: DEBUG nova.network.neutron [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2707.988780] env[61663]: DEBUG nova.network.neutron [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2707.998742] env[61663]: INFO nova.compute.manager [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Took 0.58 seconds to deallocate network for instance. [ 2708.090767] env[61663]: INFO nova.scheduler.client.report [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Deleted allocations for instance c21a5af5-004b-4544-bcf0-f105d6f336c9 [ 2708.114207] env[61663]: DEBUG oslo_concurrency.lockutils [None req-635a1d9b-e5df-413b-ace3-3697ed4d40b6 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 600.531s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2708.115348] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 404.499s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2708.115567] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Acquiring lock "c21a5af5-004b-4544-bcf0-f105d6f336c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2708.115771] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61663) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2708.115939] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2708.118129] env[61663]: INFO nova.compute.manager [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Terminating instance [ 2708.119759] env[61663]: DEBUG nova.compute.manager [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2708.119960] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2708.120722] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45bfd393-9c74-4210-ab40-ee173919b917 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.131106] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c623d270-4ed1-4da6-be29-345878a52a7f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.141760] env[61663]: DEBUG nova.compute.manager [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2708.161698] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c21a5af5-004b-4544-bcf0-f105d6f336c9 could not be found. [ 2708.161898] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2708.162087] env[61663]: INFO nova.compute.manager [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Took 0.04 seconds to destroy the instance on the hypervisor.
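Annotation: the paired "Acquiring lock ... waited N.NNNs" and "released ... held N.NNNs" entries throughout this trace (for "compute_resources", the instance UUID, and the "-events" locks) are emitted by oslo.concurrency's lock wrapper, which serializes callers on a named lock and logs the wait and hold times. A minimal sketch of the pattern, with an illustrative function body rather than Nova source:

    from oslo_concurrency import lockutils

    # All callers decorated with the same lock name are serialized; the
    # wrapper logs the DEBUG lines seen above from lockutils.py (inner).
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # body runs with the "compute_resources" lock held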
[ 2708.162319] env[61663]: DEBUG oslo.service.loopingcall [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2708.162805] env[61663]: DEBUG nova.compute.manager [-] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2708.162805] env[61663]: DEBUG nova.network.neutron [-] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2708.186818] env[61663]: DEBUG nova.network.neutron [-] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2708.188715] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2708.188940] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2708.190532] env[61663]: INFO nova.compute.claims [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2708.193860] env[61663]: INFO nova.compute.manager [-] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] Took 0.03 seconds to deallocate network for instance. [ 2708.282540] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c307fab-5c9e-471b-9e4c-4f9371dadc97 tempest-ServerTagsTestJSON-431779464 tempest-ServerTagsTestJSON-431779464-project-member] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.167s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2708.283447] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 292.197s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2708.283815] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: c21a5af5-004b-4544-bcf0-f105d6f336c9] During sync_power_state the instance has a pending task (deleting). Skip.
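Annotation: the inventory payload repeated in this trace (VCPU total=48, allocation_ratio=4.0; MEMORY_MB total=196590, reserved=512; DISK_GB total=400) is what lets claims like the one above succeed: placement capacity for each resource class is (total - reserved) * allocation_ratio, and the earlier final resource view (used_ram=1792MB) is just the 512 MB reserved plus ten 128 MB instances. A minimal sketch of that arithmetic, assuming placement's standard capacity formula:

    # Sketch only: reproduces the capacity math implied by the inventory
    # dicts in this log, not Nova or placement source code.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0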
[ 2708.283877] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "c21a5af5-004b-4544-bcf0-f105d6f336c9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2708.387819] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae45a90d-b17b-40cf-af9d-5d13607d876c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.400589] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c6c6f4-00a5-4ba7-bed1-3bbc491a6ab3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.452566] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13323f2-581a-4748-8a8d-0662b8e52d01 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.464056] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c298b9d6-e907-421c-835f-cdd88d785c53 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.485970] env[61663]: DEBUG nova.compute.provider_tree [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2708.494878] env[61663]: DEBUG nova.scheduler.client.report [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2708.511808] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.323s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2708.512782] env[61663]: DEBUG nova.compute.manager [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Start building networks asynchronously for instance.
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2708.547664] env[61663]: DEBUG nova.compute.utils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2708.548982] env[61663]: DEBUG nova.compute.manager [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2708.549595] env[61663]: DEBUG nova.network.neutron [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2708.558305] env[61663]: DEBUG nova.compute.manager [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2708.622664] env[61663]: DEBUG nova.compute.manager [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2708.648776] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2708.649038] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2708.649207] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2708.649395] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2708.649766] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2708.649766] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2708.649898] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2708.650076] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2708.650250] env[61663]: DEBUG nova.virt.hardware [None 
req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2708.650417] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2708.650590] env[61663]: DEBUG nova.virt.hardware [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2708.651486] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9f71a3-312e-4446-a358-b582a4bd9702 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.655178] env[61663]: DEBUG nova.policy [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f50378c5b326455197df095cae766a35', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '006ceb0b9457465daa8ad6d60e85c1f3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2708.661652] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ed86a8-58f3-47e1-8b19-c0eb759e16c6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2708.956195] env[61663]: DEBUG nova.network.neutron [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Successfully created port: 6c131ccc-9d41-44d9-8d54-6ea64cc2b628 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2709.690112] env[61663]: DEBUG nova.network.neutron [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Successfully updated port: 6c131ccc-9d41-44d9-8d54-6ea64cc2b628 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2709.704518] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "refresh_cache-bcc3a109-50ca-4a22-90f3-609231a3e95f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2709.704661] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 
tempest-SecurityGroupsTestJSON-400487944-project-member] Acquired lock "refresh_cache-bcc3a109-50ca-4a22-90f3-609231a3e95f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2709.704810] env[61663]: DEBUG nova.network.neutron [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2709.799538] env[61663]: DEBUG nova.network.neutron [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2710.026558] env[61663]: DEBUG nova.compute.manager [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Received event network-vif-plugged-6c131ccc-9d41-44d9-8d54-6ea64cc2b628 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2710.026783] env[61663]: DEBUG oslo_concurrency.lockutils [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] Acquiring lock "bcc3a109-50ca-4a22-90f3-609231a3e95f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2710.026994] env[61663]: DEBUG oslo_concurrency.lockutils [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] Lock "bcc3a109-50ca-4a22-90f3-609231a3e95f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2710.027366] env[61663]: DEBUG oslo_concurrency.lockutils [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] Lock "bcc3a109-50ca-4a22-90f3-609231a3e95f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2710.027521] env[61663]: DEBUG nova.compute.manager [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] No waiting events found dispatching network-vif-plugged-6c131ccc-9d41-44d9-8d54-6ea64cc2b628 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2710.027693] env[61663]: WARNING nova.compute.manager [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Received unexpected event network-vif-plugged-6c131ccc-9d41-44d9-8d54-6ea64cc2b628 for instance with vm_state building and task_state spawning.
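The nova.virt.hardware lines earlier in this build ("Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") record Nova factoring the flavor's vCPU count into candidate sockets:cores:threads layouts under the 65536 default limits. An illustrative sketch of that factoring, not the actual _get_possible_cpu_topologies implementation; the function name is hypothetical and the limits mirror the logged values:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every (sockets, cores, threads) product that yields exactly
    # the requested vCPU count, capped by the per-dimension maxima.
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append((s, c, t))
    return found

print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology logged for m1.nano

For the one-vCPU m1.nano flavor the only factorization is 1:1:1, which is why the log sorts exactly one desired topology.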
[ 2710.027856] env[61663]: DEBUG nova.compute.manager [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Received event network-changed-6c131ccc-9d41-44d9-8d54-6ea64cc2b628 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2710.028032] env[61663]: DEBUG nova.compute.manager [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Refreshing instance network info cache due to event network-changed-6c131ccc-9d41-44d9-8d54-6ea64cc2b628. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2710.028210] env[61663]: DEBUG oslo_concurrency.lockutils [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] Acquiring lock "refresh_cache-bcc3a109-50ca-4a22-90f3-609231a3e95f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2710.045112] env[61663]: DEBUG nova.network.neutron [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Updating instance_info_cache with network_info: [{"id": "6c131ccc-9d41-44d9-8d54-6ea64cc2b628", "address": "fa:16:3e:a3:2c:20", "network": {"id": "0022b0c5-4b0f-4449-a9f5-4008ee2c4e6d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-70195467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "006ceb0b9457465daa8ad6d60e85c1f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c131ccc-9d", "ovs_interfaceid": "6c131ccc-9d41-44d9-8d54-6ea64cc2b628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2710.058249] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Releasing lock "refresh_cache-bcc3a109-50ca-4a22-90f3-609231a3e95f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2710.058513] env[61663]: DEBUG nova.compute.manager [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Instance network_info: |[{"id": "6c131ccc-9d41-44d9-8d54-6ea64cc2b628", "address": "fa:16:3e:a3:2c:20", "network": {"id": "0022b0c5-4b0f-4449-a9f5-4008ee2c4e6d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-70195467-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "006ceb0b9457465daa8ad6d60e85c1f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c131ccc-9d", "ovs_interfaceid": "6c131ccc-9d41-44d9-8d54-6ea64cc2b628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2710.058791] env[61663]: DEBUG oslo_concurrency.lockutils [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] Acquired lock "refresh_cache-bcc3a109-50ca-4a22-90f3-609231a3e95f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2710.058967] env[61663]: DEBUG nova.network.neutron [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Refreshing network info cache for port 6c131ccc-9d41-44d9-8d54-6ea64cc2b628 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2710.059990] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:2c:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f246b87-f105-4b33-a71d-5caf8e99e074', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c131ccc-9d41-44d9-8d54-6ea64cc2b628', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2710.068296] env[61663]: DEBUG oslo.service.loopingcall [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2710.071302] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2710.071736] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71024f41-0061-4a40-a6fa-d9fcae019c6c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2710.092021] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2710.092021] env[61663]: value = "task-1690899" [ 2710.092021] env[61663]: _type = "Task" [ 2710.092021] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2710.102447] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690899, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2710.320571] env[61663]: DEBUG nova.network.neutron [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Updated VIF entry in instance network info cache for port 6c131ccc-9d41-44d9-8d54-6ea64cc2b628. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2710.321049] env[61663]: DEBUG nova.network.neutron [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Updating instance_info_cache with network_info: [{"id": "6c131ccc-9d41-44d9-8d54-6ea64cc2b628", "address": "fa:16:3e:a3:2c:20", "network": {"id": "0022b0c5-4b0f-4449-a9f5-4008ee2c4e6d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-70195467-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "006ceb0b9457465daa8ad6d60e85c1f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f246b87-f105-4b33-a71d-5caf8e99e074", "external-id": "nsx-vlan-transportzone-583", "segmentation_id": 583, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c131ccc-9d", "ovs_interfaceid": "6c131ccc-9d41-44d9-8d54-6ea64cc2b628", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2710.331201] env[61663]: DEBUG oslo_concurrency.lockutils [req-d592e6a7-3ac9-480f-90a0-6fe711a1329e req-7793864b-84b0-4197-a675-9643fa069b2e service nova] Releasing lock "refresh_cache-bcc3a109-50ca-4a22-90f3-609231a3e95f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2710.602245] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690899, 'name': CreateVM_Task, 'duration_secs': 0.332197} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2710.602381] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2710.603064] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2710.603237] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2710.603554] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2710.603801] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28e5192a-d11f-47fb-8009-b1a2b0000271 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2710.607990] env[61663]: DEBUG oslo_vmware.api [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Waiting for the task: (returnval){ [ 2710.607990] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5214f916-ac9a-025f-8eea-a41702e87a46" [ 2710.607990] env[61663]: _type = "Task" [ 2710.607990] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2710.615518] env[61663]: DEBUG oslo_vmware.api [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5214f916-ac9a-025f-8eea-a41702e87a46, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2711.119062] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2711.119062] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2711.119062] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a61d8cd0-8aba-4baa-9182-67d296e337bc tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2723.884039] env[61663]: DEBUG oslo_concurrency.lockutils [None req-8e29d146-947e-4861-81b6-a1eb843870d4 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "bcc3a109-50ca-4a22-90f3-609231a3e95f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2740.701275] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2744.693424] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2750.688678] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2751.692495] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2753.691937] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2753.692296] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2753.692296] env[61663]: DEBUG 
nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2753.713695] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.713844] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.713969] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.714108] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.714232] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.714389] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.714523] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.714646] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.714764] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.714881] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2753.715008] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2753.715507] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2753.726583] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2753.726792] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2753.726956] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2753.727120] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2753.728226] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5f4bf8-b570-49d2-ad76-4143e82b0d04 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2753.736859] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1cf1c5-9529-49d0-af95-1231fae96065 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2753.750289] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256218f6-8195-46d7-8e7e-7c62b520cd82 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2753.756286] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46ef7a2-c1a6-4bf4-88a4-55966712e0fa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2753.786599] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181312MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2753.786731] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2753.786923] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2753.859013] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 202e0f58-b057-4e57-8a92-c06d6efda570 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.859197] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.859316] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.859441] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.859563] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.859679] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.859795] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.859910] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.860050] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 911c036c-c7d8-4ff7-b874-335361fb5281 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.860196] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance bcc3a109-50ca-4a22-90f3-609231a3e95f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2753.870449] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b98fdfac-2912-403c-a087-46e8eaf40829 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2753.870670] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2753.870815] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2753.998173] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb76a2c-8db2-481f-821d-7017de17fbf0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2754.006034] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34214ec9-a116-44f2-95b3-c05dec666ec3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2754.035744] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb056a5-c063-482d-868a-d99fa67b64f3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2754.042040] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913abbe2-4526-45ec-9c0e-5f7de5d0040a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2754.054605] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2754.062699] env[61663]: DEBUG nova.scheduler.client.report [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2754.075896] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2754.075980] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.289s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2755.052454] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2755.692358] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2756.872559] env[61663]: WARNING oslo_vmware.rw_handles [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2756.872559] env[61663]: ERROR oslo_vmware.rw_handles [ 2756.873401] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 
tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/ac431b28-e782-4a16-acc1-1d91ddef4a7f/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2756.874922] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2756.875213] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Copying Virtual Disk [datastore1] vmware_temp/ac431b28-e782-4a16-acc1-1d91ddef4a7f/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/ac431b28-e782-4a16-acc1-1d91ddef4a7f/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2756.875494] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8eeeef51-b441-434d-8ff3-4e4efc81d23d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2756.882733] env[61663]: DEBUG oslo_vmware.api [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Waiting for the task: (returnval){ [ 2756.882733] env[61663]: value = "task-1690900" [ 2756.882733] env[61663]: _type = "Task" [ 2756.882733] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2756.890316] env[61663]: DEBUG oslo_vmware.api [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Task: {'id': task-1690900, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2757.393088] env[61663]: DEBUG oslo_vmware.exceptions [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2757.393453] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2757.394062] env[61663]: ERROR nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2757.394062] env[61663]: Faults: ['InvalidArgument'] [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Traceback (most recent call last): [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] yield resources [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] self.driver.spawn(context, instance, image_meta, [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] self._fetch_image_if_missing(context, vi) [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] image_cache(vi, tmp_image_ds_loc) [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] vm_util.copy_virtual_disk( [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] session._wait_for_task(vmdk_copy_task) [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] return self.wait_for_task(task_ref) [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] return evt.wait() [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] result = hub.switch() [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] return self.greenlet.switch() [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] self.f(*self.args, **self.kw) [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] raise exceptions.translate_fault(task_info.error) [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Faults: ['InvalidArgument'] [ 2757.394062] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] [ 2757.395519] env[61663]: INFO nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Terminating instance [ 2757.395959] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2757.396185] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2757.396440] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-9b0e5822-add0-4b9e-8255-c35d01cf03fe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.398602] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2757.398799] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2757.399522] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af017785-6d0f-4809-8f37-c15bec0e4db8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.409271] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2757.409510] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc4fb0b1-10c8-4668-aaac-2f9766c7a535 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.411648] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2757.411805] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2757.412778] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45114fbc-e179-4422-a757-35bd6c291ef4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.417230] env[61663]: DEBUG oslo_vmware.api [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Waiting for the task: (returnval){ [ 2757.417230] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52feb96a-bcde-e5d0-71a9-d25bb5f82cf4" [ 2757.417230] env[61663]: _type = "Task" [ 2757.417230] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2757.429479] env[61663]: DEBUG oslo_vmware.api [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52feb96a-bcde-e5d0-71a9-d25bb5f82cf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2757.479384] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2757.479603] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2757.479783] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Deleting the datastore file [datastore1] 202e0f58-b057-4e57-8a92-c06d6efda570 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2757.480053] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e04a002-54ae-4438-a1c2-962bfeca30b7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.485816] env[61663]: DEBUG oslo_vmware.api [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Waiting for the task: (returnval){ [ 2757.485816] env[61663]: value = "task-1690902" [ 2757.485816] env[61663]: _type = "Task" [ 2757.485816] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2757.492946] env[61663]: DEBUG oslo_vmware.api [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Task: {'id': task-1690902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2757.691848] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2757.692011] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2757.927738] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2757.928081] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Creating directory with path [datastore1] vmware_temp/d5dadc80-8e6a-4109-8536-8c5aef0081ac/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2757.928237] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b55764b0-3e1c-41e6-ab7c-7926381c4a63 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.940253] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Created directory with path [datastore1] vmware_temp/d5dadc80-8e6a-4109-8536-8c5aef0081ac/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2757.940445] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Fetch image to [datastore1] vmware_temp/d5dadc80-8e6a-4109-8536-8c5aef0081ac/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2757.940607] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/d5dadc80-8e6a-4109-8536-8c5aef0081ac/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2757.941346] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34484418-d583-4dc4-a7c8-7fd4fd32fdfa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.947840] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93e63e4-598a-4e29-bd73-05e193d6149b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.956836] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2cee9d-a5a1-41de-85db-8bf08f451469 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.989855] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-885d3cec-ee6e-4617-8e3f-4133a92ff5a5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2757.998444] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5524f54f-2cba-41a0-a0b6-8aa67fd007a0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.000138] env[61663]: DEBUG oslo_vmware.api [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Task: {'id': task-1690902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073675} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2758.000377] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2758.000560] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2758.000734] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2758.000908] env[61663]: INFO nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Took 0.60 seconds to destroy the instance on the hypervisor. 
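
Context for the task lines above: every vCenter operation in this trace (SearchDatastore_Task, DeleteDatastoreFile_Task / task-1690902, and later CreateVM_Task) returns a task reference immediately, and wait_for_task then polls the task's state until it reaches success or error; an error state is what surfaced earlier as VimFaultException with Faults: ['InvalidArgument']. Below is a minimal plain-Python sketch of that polling loop. It is illustrative only: get_task_info is a hypothetical accessor standing in for the real vim bindings, and TaskFault is a stand-in for oslo_vmware.exceptions.VimFaultException, not the library's actual API.

import time

class TaskFault(Exception):
    # Stand-in for VimFaultException: carries the fault names reported by
    # vCenter, e.g. ['InvalidArgument'] as seen in the traceback above.
    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list

def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    # get_task_info is a hypothetical callable returning an object with
    # .state ('queued' | 'running' | 'success' | 'error') and .error
    # (an object with .message and .faults); the real driver reads the
    # equivalent fields from the Task managed object.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # Mirrors _poll_task raising translate_fault(task_info.error)
            raise TaskFault(info.error.message, info.error.faults)
        # 'queued'/'running': poll again, which is what the repeated
        # "progress is 0%." lines above correspond to.
        time.sleep(interval)
    raise TimeoutError('task did not complete within %.0fs' % timeout)
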
[ 2758.003012] env[61663]: DEBUG nova.compute.claims [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2758.003191] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2758.003431] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2758.021324] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2758.078076] env[61663]: DEBUG oslo_vmware.rw_handles [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d5dadc80-8e6a-4109-8536-8c5aef0081ac/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2758.139285] env[61663]: DEBUG oslo_vmware.rw_handles [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2758.139483] env[61663]: DEBUG oslo_vmware.rw_handles [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d5dadc80-8e6a-4109-8536-8c5aef0081ac/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2758.241842] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b48b4b9-3dcb-414f-a655-6e67f1cdc660 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.249770] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ab2f50-0e01-440d-9319-8106199943f0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.279517] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7d88541-200a-4bed-ae28-77ee226e7717 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.286657] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59bdc21-c327-4faf-8f99-a1ed55d1785f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.299274] env[61663]: DEBUG nova.compute.provider_tree [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2758.308261] env[61663]: DEBUG nova.scheduler.client.report [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2758.321668] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.318s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2758.322220] env[61663]: ERROR nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2758.322220] env[61663]: Faults: ['InvalidArgument'] [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Traceback (most recent call last): [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2758.322220] env[61663]: ERROR 
nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] self.driver.spawn(context, instance, image_meta, [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] self._fetch_image_if_missing(context, vi) [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] image_cache(vi, tmp_image_ds_loc) [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] vm_util.copy_virtual_disk( [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] session._wait_for_task(vmdk_copy_task) [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] return self.wait_for_task(task_ref) [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] return evt.wait() [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] result = hub.switch() [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] return self.greenlet.switch() [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] self.f(*self.args, **self.kw) [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] raise exceptions.translate_fault(task_info.error) [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Faults: ['InvalidArgument'] [ 2758.322220] env[61663]: ERROR nova.compute.manager [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] [ 2758.322918] env[61663]: DEBUG nova.compute.utils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2758.324348] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Build of instance 202e0f58-b057-4e57-8a92-c06d6efda570 was re-scheduled: A specified parameter was not correct: fileType [ 2758.324348] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2758.324710] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2758.324887] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2758.325072] env[61663]: DEBUG nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2758.325245] env[61663]: DEBUG nova.network.neutron [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2758.655111] env[61663]: DEBUG nova.network.neutron [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2758.667597] env[61663]: INFO nova.compute.manager [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Took 0.34 seconds to deallocate network for instance. [ 2758.759137] env[61663]: INFO nova.scheduler.client.report [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Deleted allocations for instance 202e0f58-b057-4e57-8a92-c06d6efda570 [ 2758.784157] env[61663]: DEBUG oslo_concurrency.lockutils [None req-ca5f767c-c81d-4e87-9992-61b193ee8985 tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "202e0f58-b057-4e57-8a92-c06d6efda570" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 626.499s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2758.785424] env[61663]: DEBUG oslo_concurrency.lockutils [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "202e0f58-b057-4e57-8a92-c06d6efda570" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 429.552s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2758.785646] env[61663]: DEBUG oslo_concurrency.lockutils [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Acquiring lock "202e0f58-b057-4e57-8a92-c06d6efda570-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2758.785856] env[61663]: DEBUG oslo_concurrency.lockutils [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "202e0f58-b057-4e57-8a92-c06d6efda570-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2758.786053] env[61663]: DEBUG oslo_concurrency.lockutils [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "202e0f58-b057-4e57-8a92-c06d6efda570-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2758.788043] env[61663]: INFO nova.compute.manager [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Terminating instance [ 2758.789778] env[61663]: DEBUG nova.compute.manager [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2758.789975] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2758.790446] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6263a0a9-06e3-4790-b598-07a8a29169ac {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.800649] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d71425-abc9-4db3-9f93-7fd5cd61baa5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2758.811712] env[61663]: DEBUG nova.compute.manager [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2758.833720] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 202e0f58-b057-4e57-8a92-c06d6efda570 could not be found. [ 2758.833929] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2758.834122] env[61663]: INFO nova.compute.manager [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Took 0.04 seconds to destroy the instance on the hypervisor. 
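
The "Inventory has not changed" report above for provider b47d006d-a9bd-461e-a5d9-39811f005278 repeats after each claim and abort. Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio, which is why this host advertises 192 VCPU despite 48 physical ones. A quick recomputation of the logged figures; the helper is illustrative, not Nova or Placement code:

# Inventory exactly as logged above (min_unit/max_unit/step_size omitted).
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

def capacity(inv):
    # Placement's effective capacity: (total - reserved) * allocation_ratio
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
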
[ 2758.834368] env[61663]: DEBUG oslo.service.loopingcall [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2758.834624] env[61663]: DEBUG nova.compute.manager [-] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2758.834722] env[61663]: DEBUG nova.network.neutron [-] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2758.857197] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2758.857405] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2758.858812] env[61663]: INFO nova.compute.claims [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2758.861549] env[61663]: DEBUG nova.network.neutron [-] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2758.868949] env[61663]: INFO nova.compute.manager [-] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] Took 0.03 seconds to deallocate network for instance. [ 2758.961360] env[61663]: DEBUG oslo_concurrency.lockutils [None req-27eed43f-f341-42ce-9e01-8799429a272e tempest-SecurityGroupsTestJSON-400487944 tempest-SecurityGroupsTestJSON-400487944-project-member] Lock "202e0f58-b057-4e57-8a92-c06d6efda570" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.176s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2758.962550] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "202e0f58-b057-4e57-8a92-c06d6efda570" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 342.875s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2758.962796] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 202e0f58-b057-4e57-8a92-c06d6efda570] During sync_power_state the instance has a pending task (deleting). Skip. 
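
Nearly every step above is bracketed by oslo.concurrency lock messages, and the held/waited times explain the ordering: terminate_instance waited 429.552s (and _sync_power_states 342.875s) on the lock named after instance 202e0f58-b057-4e57-8a92-c06d6efda570 because _locked_do_build_and_run_instance held it for 626.499s, while the resource tracker serializes instance_claim and abort_instance_claim on a single "compute_resources" lock. A minimal sketch of the same two patterns using oslo.concurrency's named locks; the function bodies and names are illustrative, not Nova's actual code:

from oslo_concurrency import lockutils

# Per-instance serialization: whoever holds the lock named after the
# instance UUID blocks terminate, power-state sync, etc. until release.
def do_build_and_run(instance_uuid):
    with lockutils.lock(instance_uuid):
        pass  # build and run the instance

# Resource-tracker style: one named lock guards every claim and abort,
# which is why both operations log acquiring/releasing "compute_resources".
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, flavor):
    pass  # check and record VCPU/MEMORY_MB/DISK_GB usage for the claim
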
[ 2758.963010] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "202e0f58-b057-4e57-8a92-c06d6efda570" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2759.031171] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3878da2-83d6-41b7-a30c-053b8002827b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.038867] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423a4ddc-fbdc-4a5e-9178-1b75d423fcdb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.069022] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718671b4-b0dd-4fc8-aeac-d7d5f2c6f93d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.075645] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a16e523-60b1-4800-9ccc-44825bec57bc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.088552] env[61663]: DEBUG nova.compute.provider_tree [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2759.097349] env[61663]: DEBUG nova.scheduler.client.report [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2759.110929] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.253s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2759.111437] env[61663]: DEBUG nova.compute.manager [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2759.146159] env[61663]: DEBUG nova.compute.utils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2759.147432] env[61663]: DEBUG nova.compute.manager [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2759.147609] env[61663]: DEBUG nova.network.neutron [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2759.156851] env[61663]: DEBUG nova.compute.manager [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2759.220526] env[61663]: DEBUG nova.policy [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ecb588edff64911bf5120de68b010eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8b90f6021c544484902ae30054503895', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2759.223902] env[61663]: DEBUG nova.compute.manager [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2759.251663] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2759.251937] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2759.252156] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2759.252357] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2759.252540] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2759.252709] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2759.252945] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2759.253135] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2759.253321] env[61663]: DEBUG nova.virt.hardware [None 
req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2759.253544] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2759.253752] env[61663]: DEBUG nova.virt.hardware [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2759.254722] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a294e85-8d4e-4b8b-95ab-ce4b66a1d8cd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.263108] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fa9f21-b7b1-4bc5-ac89-4f554ce90e8b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2759.538522] env[61663]: DEBUG nova.network.neutron [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Successfully created port: 065a03f8-11ec-4acb-bc15-f2680f0f59e7 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2760.398873] env[61663]: DEBUG nova.network.neutron [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Successfully updated port: 065a03f8-11ec-4acb-bc15-f2680f0f59e7 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2760.409646] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "refresh_cache-b98fdfac-2912-403c-a087-46e8eaf40829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2760.409798] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired lock "refresh_cache-b98fdfac-2912-403c-a087-46e8eaf40829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2760.409943] env[61663]: DEBUG nova.network.neutron [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2760.450804] env[61663]: DEBUG nova.network.neutron [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 
tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2760.607405] env[61663]: DEBUG nova.network.neutron [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Updating instance_info_cache with network_info: [{"id": "065a03f8-11ec-4acb-bc15-f2680f0f59e7", "address": "fa:16:3e:e4:1c:58", "network": {"id": "bd3a627c-746f-4e8f-8223-f0f3b30c965b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-924597021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b90f6021c544484902ae30054503895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap065a03f8-11", "ovs_interfaceid": "065a03f8-11ec-4acb-bc15-f2680f0f59e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2760.618059] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Releasing lock "refresh_cache-b98fdfac-2912-403c-a087-46e8eaf40829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2760.618335] env[61663]: DEBUG nova.compute.manager [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Instance network_info: |[{"id": "065a03f8-11ec-4acb-bc15-f2680f0f59e7", "address": "fa:16:3e:e4:1c:58", "network": {"id": "bd3a627c-746f-4e8f-8223-f0f3b30c965b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-924597021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b90f6021c544484902ae30054503895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap065a03f8-11", "ovs_interfaceid": "065a03f8-11ec-4acb-bc15-f2680f0f59e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2760.618720] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:1c:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c797172-a569-458e-aeb0-3f21e589a740', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '065a03f8-11ec-4acb-bc15-f2680f0f59e7', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2760.626534] env[61663]: DEBUG oslo.service.loopingcall [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2760.627012] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2760.627260] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e78bcae-9a38-4ad1-aa88-2c1181d63dbe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2760.647457] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2760.647457] env[61663]: value = "task-1690903" [ 2760.647457] env[61663]: _type = "Task" [ 2760.647457] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2760.655212] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690903, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2760.688901] env[61663]: DEBUG nova.compute.manager [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Received event network-vif-plugged-065a03f8-11ec-4acb-bc15-f2680f0f59e7 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2760.689191] env[61663]: DEBUG oslo_concurrency.lockutils [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] Acquiring lock "b98fdfac-2912-403c-a087-46e8eaf40829-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2760.689399] env[61663]: DEBUG oslo_concurrency.lockutils [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] Lock "b98fdfac-2912-403c-a087-46e8eaf40829-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2760.689603] env[61663]: DEBUG oslo_concurrency.lockutils [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] Lock "b98fdfac-2912-403c-a087-46e8eaf40829-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2760.689826] env[61663]: DEBUG nova.compute.manager [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] No waiting events found dispatching network-vif-plugged-065a03f8-11ec-4acb-bc15-f2680f0f59e7 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2760.690013] env[61663]: WARNING nova.compute.manager [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Received unexpected event network-vif-plugged-065a03f8-11ec-4acb-bc15-f2680f0f59e7 for instance with vm_state building and task_state spawning. [ 2760.690184] env[61663]: DEBUG nova.compute.manager [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Received event network-changed-065a03f8-11ec-4acb-bc15-f2680f0f59e7 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2760.690344] env[61663]: DEBUG nova.compute.manager [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Refreshing instance network info cache due to event network-changed-065a03f8-11ec-4acb-bc15-f2680f0f59e7. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2760.690530] env[61663]: DEBUG oslo_concurrency.lockutils [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] Acquiring lock "refresh_cache-b98fdfac-2912-403c-a087-46e8eaf40829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2760.690695] env[61663]: DEBUG oslo_concurrency.lockutils [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] Acquired lock "refresh_cache-b98fdfac-2912-403c-a087-46e8eaf40829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2760.690866] env[61663]: DEBUG nova.network.neutron [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Refreshing network info cache for port 065a03f8-11ec-4acb-bc15-f2680f0f59e7 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2760.989411] env[61663]: DEBUG nova.network.neutron [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Updated VIF entry in instance network info cache for port 065a03f8-11ec-4acb-bc15-f2680f0f59e7. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2760.989779] env[61663]: DEBUG nova.network.neutron [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Updating instance_info_cache with network_info: [{"id": "065a03f8-11ec-4acb-bc15-f2680f0f59e7", "address": "fa:16:3e:e4:1c:58", "network": {"id": "bd3a627c-746f-4e8f-8223-f0f3b30c965b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-924597021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b90f6021c544484902ae30054503895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap065a03f8-11", "ovs_interfaceid": "065a03f8-11ec-4acb-bc15-f2680f0f59e7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2760.999145] env[61663]: DEBUG oslo_concurrency.lockutils [req-73dfc8a9-b9ab-4ffc-b859-bf674d009e23 req-abe475c6-bec1-4a33-a66f-62377a54c1d3 service nova] Releasing lock "refresh_cache-b98fdfac-2912-403c-a087-46e8eaf40829" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2761.157306] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690903, 'name': CreateVM_Task, 'duration_secs': 0.322834} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2761.157435] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2761.158119] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2761.158293] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2761.158611] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2761.158857] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-607d65ad-f9dd-4c38-9e46-5348f23fd6ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2761.163192] env[61663]: DEBUG oslo_vmware.api [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for the task: (returnval){ [ 2761.163192] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52596c6e-7343-c175-78cf-2d6997b3a614" [ 2761.163192] env[61663]: _type = "Task" [ 2761.163192] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2761.175490] env[61663]: DEBUG oslo_vmware.api [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52596c6e-7343-c175-78cf-2d6997b3a614, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2761.672925] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2761.673233] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2761.673431] env[61663]: DEBUG oslo_concurrency.lockutils [None req-561d081a-7b08-4b12-b49e-0b9b277e8e3a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2767.079784] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "3930490f-586e-4bbd-aad2-1b4995ff6aa3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2767.080125] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "3930490f-586e-4bbd-aad2-1b4995ff6aa3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2767.688293] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2800.692670] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2805.692568] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2805.983083] env[61663]: WARNING oslo_vmware.rw_handles [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 
2805.983083] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2805.983083] env[61663]: ERROR oslo_vmware.rw_handles [ 2805.983847] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/d5dadc80-8e6a-4109-8536-8c5aef0081ac/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2805.986926] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2805.986926] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Copying Virtual Disk [datastore1] vmware_temp/d5dadc80-8e6a-4109-8536-8c5aef0081ac/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/d5dadc80-8e6a-4109-8536-8c5aef0081ac/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2805.986926] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7d59567-b344-4c29-b4d5-f15064da847d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2805.995844] env[61663]: DEBUG oslo_vmware.api [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Waiting for the task: (returnval){ [ 2805.995844] env[61663]: value = "task-1690904" [ 2805.995844] env[61663]: _type = "Task" [ 2805.995844] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2806.003599] env[61663]: DEBUG oslo_vmware.api [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Task: {'id': task-1690904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2806.506462] env[61663]: DEBUG oslo_vmware.exceptions [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2806.507757] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2806.509038] env[61663]: ERROR nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2806.509038] env[61663]: Faults: ['InvalidArgument'] [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Traceback (most recent call last): [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] yield resources [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] self.driver.spawn(context, instance, image_meta, [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] self._fetch_image_if_missing(context, vi) [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] image_cache(vi, tmp_image_ds_loc) [ 2806.509038] env[61663]: ERROR 
nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] vm_util.copy_virtual_disk( [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] session._wait_for_task(vmdk_copy_task) [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] return self.wait_for_task(task_ref) [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] return evt.wait() [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] result = hub.switch() [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] return self.greenlet.switch() [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] self.f(*self.args, **self.kw) [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] raise exceptions.translate_fault(task_info.error) [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Faults: ['InvalidArgument'] [ 2806.509038] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] [ 2806.509038] env[61663]: INFO nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Terminating instance [ 2806.510503] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquired lock 
"[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2806.510716] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2806.510964] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9e21899-8488-48c8-9696-ccd8455a5a4d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2806.513254] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2806.513454] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2806.514210] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bbdb2d-c20c-425d-bf37-d65195fd89c9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2806.520983] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2806.521207] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67dcb5a0-6782-4dca-a6b8-c78ea175b009 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2806.523380] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2806.523554] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2806.524496] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75c5e68d-d6cd-4c1c-a231-3080de5672e5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2806.528998] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Waiting for the task: (returnval){ [ 2806.528998] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523fddc6-ba66-1a18-ead1-2acdbdedcb73" [ 2806.528998] env[61663]: _type = "Task" [ 2806.528998] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2806.536077] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523fddc6-ba66-1a18-ead1-2acdbdedcb73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2806.595817] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2806.596033] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2806.596224] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Deleting the datastore file [datastore1] 67f8162d-a631-4f0e-b03c-fd76ee131615 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2806.596479] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-788145fb-d338-433b-9382-370de62f7dda {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2806.603063] env[61663]: DEBUG oslo_vmware.api [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Waiting for the task: (returnval){ [ 2806.603063] env[61663]: value = "task-1690906" [ 2806.603063] env[61663]: _type = "Task" [ 2806.603063] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2806.610306] env[61663]: DEBUG oslo_vmware.api [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Task: {'id': task-1690906, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2807.038960] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2807.039281] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Creating directory with path [datastore1] vmware_temp/69b2a424-69a2-4bde-912c-36ed8e7cd8f3/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2807.039463] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18fb20c2-56c6-4eb6-b77d-b049b168ba6a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.053907] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Created directory with path [datastore1] vmware_temp/69b2a424-69a2-4bde-912c-36ed8e7cd8f3/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2807.053907] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Fetch image to [datastore1] vmware_temp/69b2a424-69a2-4bde-912c-36ed8e7cd8f3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2807.053907] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/69b2a424-69a2-4bde-912c-36ed8e7cd8f3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2807.053907] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84d9d0e-cfbe-4004-89f8-751d03fb7455 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.059089] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7f1102-38b0-4d7f-8257-e5102a8fda3c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.067923] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff3e6c6-f92e-4431-86df-2c32b1125951 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.097652] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63ca66f-a392-4975-bf16-4f792b3dea80 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.103076] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c874c7c6-4fd3-4a79-ac3e-c7de8b6f4e40 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.112235] env[61663]: DEBUG oslo_vmware.api [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Task: {'id': task-1690906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088456} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2807.112511] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2807.112733] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2807.112959] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2807.113216] env[61663]: INFO nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2807.115375] env[61663]: DEBUG nova.compute.claims [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2807.115524] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2807.115695] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2807.128745] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2807.182497] env[61663]: DEBUG oslo_vmware.rw_handles [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69b2a424-69a2-4bde-912c-36ed8e7cd8f3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2807.244256] env[61663]: DEBUG oslo_vmware.rw_handles [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2807.244466] env[61663]: DEBUG oslo_vmware.rw_handles [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/69b2a424-69a2-4bde-912c-36ed8e7cd8f3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2807.346311] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-617675cd-6694-4d91-b809-3a07994a344e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.353734] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-229ea3ca-4ae4-4685-b55f-de848a541c5f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.383437] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8cfcff-b721-4c3f-b35c-159ee57cd629 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.389973] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924690c2-90e4-4c88-8dc1-8650a024a95a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.402655] env[61663]: DEBUG nova.compute.provider_tree [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2807.412510] env[61663]: DEBUG nova.scheduler.client.report [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2807.426014] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.310s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2807.426576] env[61663]: ERROR nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2807.426576] env[61663]: Faults: ['InvalidArgument'] [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Traceback (most recent call last): [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] self.driver.spawn(context, instance, image_meta, [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] self._fetch_image_if_missing(context, vi) [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] image_cache(vi, tmp_image_ds_loc) [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] vm_util.copy_virtual_disk( [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] session._wait_for_task(vmdk_copy_task) [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] return self.wait_for_task(task_ref) [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] return evt.wait() [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] result = hub.switch() [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] return self.greenlet.switch() [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] self.f(*self.args, **self.kw) [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 
67f8162d-a631-4f0e-b03c-fd76ee131615] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] raise exceptions.translate_fault(task_info.error) [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Faults: ['InvalidArgument'] [ 2807.426576] env[61663]: ERROR nova.compute.manager [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] [ 2807.427367] env[61663]: DEBUG nova.compute.utils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2807.428828] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Build of instance 67f8162d-a631-4f0e-b03c-fd76ee131615 was re-scheduled: A specified parameter was not correct: fileType [ 2807.428828] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2807.429223] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2807.429451] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2807.429602] env[61663]: DEBUG nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2807.429768] env[61663]: DEBUG nova.network.neutron [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2807.810964] env[61663]: DEBUG nova.network.neutron [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2807.827709] env[61663]: INFO nova.compute.manager [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Took 0.40 seconds to deallocate network for instance. [ 2807.924190] env[61663]: INFO nova.scheduler.client.report [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Deleted allocations for instance 67f8162d-a631-4f0e-b03c-fd76ee131615 [ 2807.946028] env[61663]: DEBUG oslo_concurrency.lockutils [None req-2c50c830-7318-436d-be6c-9c09dca67f3d tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "67f8162d-a631-4f0e-b03c-fd76ee131615" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 633.108s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2807.947034] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "67f8162d-a631-4f0e-b03c-fd76ee131615" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.912s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2807.947258] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Acquiring lock "67f8162d-a631-4f0e-b03c-fd76ee131615-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2807.947478] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock 
"67f8162d-a631-4f0e-b03c-fd76ee131615-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2807.947728] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "67f8162d-a631-4f0e-b03c-fd76ee131615-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2807.950022] env[61663]: INFO nova.compute.manager [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Terminating instance [ 2807.951754] env[61663]: DEBUG nova.compute.manager [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2807.952035] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2807.952672] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a3b3384-20cf-4063-9b11-74ed8217f4e1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.959270] env[61663]: DEBUG nova.compute.manager [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2807.967474] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f3a1ca-2a86-46b3-8fc6-66de239c262e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2807.995627] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 67f8162d-a631-4f0e-b03c-fd76ee131615 could not be found. 
[ 2807.995839] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2807.996029] env[61663]: INFO nova.compute.manager [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2807.996754] env[61663]: DEBUG oslo.service.loopingcall [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2807.997211] env[61663]: DEBUG nova.compute.manager [-] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2807.997249] env[61663]: DEBUG nova.network.neutron [-] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2808.014163] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2808.014411] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2808.015935] env[61663]: INFO nova.compute.claims [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2808.032760] env[61663]: DEBUG nova.network.neutron [-] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2808.067518] env[61663]: INFO nova.compute.manager [-] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] Took 0.07 seconds to deallocate network for instance. 
[ 2808.153990] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5c3f760b-67fe-46e9-8adb-c8d0d9924aa5 tempest-ServerRescueNegativeTestJSON-1626982497 tempest-ServerRescueNegativeTestJSON-1626982497-project-member] Lock "67f8162d-a631-4f0e-b03c-fd76ee131615" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.207s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2808.155603] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "67f8162d-a631-4f0e-b03c-fd76ee131615" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 392.068s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2808.155727] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 67f8162d-a631-4f0e-b03c-fd76ee131615] During sync_power_state the instance has a pending task (deleting). Skip. [ 2808.155893] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "67f8162d-a631-4f0e-b03c-fd76ee131615" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2808.209618] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643539f3-b4ef-4acc-a2c9-6adc7347588e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2808.217365] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3151535-9b8d-4345-8786-aab17fcbf937 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2808.246747] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291c2c24-46c5-4d59-b11e-f15225c7a8d8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2808.253938] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabbede6-9ffe-48b8-a0ab-b38229d02bb6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2808.267141] env[61663]: DEBUG nova.compute.provider_tree [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2808.276501] env[61663]: DEBUG nova.scheduler.client.report [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2808.292236] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.278s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2808.292754] env[61663]: DEBUG nova.compute.manager [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2808.335902] env[61663]: DEBUG nova.compute.utils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2808.338477] env[61663]: DEBUG nova.compute.manager [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2808.338477] env[61663]: DEBUG nova.network.neutron [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2808.346059] env[61663]: DEBUG nova.compute.manager [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2808.411808] env[61663]: DEBUG nova.compute.manager [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2808.432859] env[61663]: DEBUG nova.policy [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ff4e0b0a90ee4eb1b471509c6a1dd60b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67f23651373c47be8ea682898e598f3b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2808.436904] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2808.437146] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2808.437306] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2808.437486] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2808.437631] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2808.437775] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2808.437977] 
env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2808.438149] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2808.438563] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2808.438563] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2808.438707] env[61663]: DEBUG nova.virt.hardware [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2808.439467] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4c7538-a21d-4f7b-8ca4-3355e7f505f2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2808.447345] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104e1258-f9d5-4d34-a296-22bd5e47e76b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2808.734179] env[61663]: DEBUG nova.network.neutron [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Successfully created port: bcdbaf8e-5ccd-4c35-8cae-fd7880177d88 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2809.742478] env[61663]: DEBUG nova.network.neutron [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Successfully updated port: bcdbaf8e-5ccd-4c35-8cae-fd7880177d88 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2809.755188] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "refresh_cache-3930490f-586e-4bbd-aad2-1b4995ff6aa3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2809.755373] env[61663]: DEBUG oslo_concurrency.lockutils 
[None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquired lock "refresh_cache-3930490f-586e-4bbd-aad2-1b4995ff6aa3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2809.755530] env[61663]: DEBUG nova.network.neutron [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2809.798227] env[61663]: DEBUG nova.network.neutron [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2809.852583] env[61663]: DEBUG nova.compute.manager [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Received event network-vif-plugged-bcdbaf8e-5ccd-4c35-8cae-fd7880177d88 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2809.852856] env[61663]: DEBUG oslo_concurrency.lockutils [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] Acquiring lock "3930490f-586e-4bbd-aad2-1b4995ff6aa3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2809.853082] env[61663]: DEBUG oslo_concurrency.lockutils [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] Lock "3930490f-586e-4bbd-aad2-1b4995ff6aa3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2809.853270] env[61663]: DEBUG oslo_concurrency.lockutils [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] Lock "3930490f-586e-4bbd-aad2-1b4995ff6aa3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2809.853691] env[61663]: DEBUG nova.compute.manager [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] No waiting events found dispatching network-vif-plugged-bcdbaf8e-5ccd-4c35-8cae-fd7880177d88 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2809.853691] env[61663]: WARNING nova.compute.manager [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Received unexpected event network-vif-plugged-bcdbaf8e-5ccd-4c35-8cae-fd7880177d88 for instance with vm_state building and task_state spawning. 
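
The records just above show Nova's external-event plumbing at work: neutron reports network-vif-plugged for the new port, the compute manager serializes access with the per-instance "-events" lock, and pop_instance_event finds no registered waiter, so the event is logged as unexpected while the instance is still building. A minimal sketch of that dispatch pattern, using plain threading and hypothetical names rather than Nova's actual nova.compute.manager.InstanceEvents implementation:

import threading

class InstanceEvents:
    # Sketch only: mirrors the pattern in the log, not Nova's real class.
    def __init__(self):
        self._events = {}              # instance_uuid -> {event_name: threading.Event}
        self._lock = threading.Lock()  # stands in for the "<uuid>-events" lock above

    def prepare(self, instance_uuid, event_name):
        # Called by the thread that will wait (e.g. before plugging VIFs).
        with self._lock:
            ev = threading.Event()
            self._events.setdefault(instance_uuid, {})[event_name] = ev
            return ev

    def pop(self, instance_uuid, event_name):
        # Called by the external-event handler when neutron reports the event.
        with self._lock:
            return self._events.get(instance_uuid, {}).pop(event_name, None)

def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop(instance_uuid, event_name)
    if waiter is None:
        # No registered waiter: the event raced ahead of registration, so it
        # is logged as unexpected and dropped, as in the WARNING record above.
        print('WARNING: received unexpected event %s' % event_name)
    else:
        waiter.set()  # wakes the thread blocked in waiter.wait(timeout)

During a normal boot the spawning thread registers the expected events before plugging VIFs and blocks on the waiter; here the event arrived first, which the log treats as non-fatal (WARNING) because the instance is still in vm_state building.
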
[ 2809.853815] env[61663]: DEBUG nova.compute.manager [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Received event network-changed-bcdbaf8e-5ccd-4c35-8cae-fd7880177d88 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2809.853899] env[61663]: DEBUG nova.compute.manager [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Refreshing instance network info cache due to event network-changed-bcdbaf8e-5ccd-4c35-8cae-fd7880177d88. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2809.854076] env[61663]: DEBUG oslo_concurrency.lockutils [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] Acquiring lock "refresh_cache-3930490f-586e-4bbd-aad2-1b4995ff6aa3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2809.961009] env[61663]: DEBUG nova.network.neutron [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Updating instance_info_cache with network_info: [{"id": "bcdbaf8e-5ccd-4c35-8cae-fd7880177d88", "address": "fa:16:3e:7f:69:2f", "network": {"id": "3ea6c793-2cad-4ae1-981e-e05421195e1f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1122825267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67f23651373c47be8ea682898e598f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcdbaf8e-5c", "ovs_interfaceid": "bcdbaf8e-5ccd-4c35-8cae-fd7880177d88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2809.972067] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Releasing lock "refresh_cache-3930490f-586e-4bbd-aad2-1b4995ff6aa3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2809.972360] env[61663]: DEBUG nova.compute.manager [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Instance network_info: |[{"id": "bcdbaf8e-5ccd-4c35-8cae-fd7880177d88", "address": "fa:16:3e:7f:69:2f", "network": {"id": "3ea6c793-2cad-4ae1-981e-e05421195e1f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1122825267-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67f23651373c47be8ea682898e598f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcdbaf8e-5c", "ovs_interfaceid": "bcdbaf8e-5ccd-4c35-8cae-fd7880177d88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2809.972669] env[61663]: DEBUG oslo_concurrency.lockutils [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] Acquired lock "refresh_cache-3930490f-586e-4bbd-aad2-1b4995ff6aa3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2809.972850] env[61663]: DEBUG nova.network.neutron [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Refreshing network info cache for port bcdbaf8e-5ccd-4c35-8cae-fd7880177d88 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2809.973902] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:69:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcdbaf8e-5ccd-4c35-8cae-fd7880177d88', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2809.981610] env[61663]: DEBUG oslo.service.loopingcall [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2809.982430] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2809.984870] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3564ac3f-50e1-49cb-9911-e529c886836d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2810.005189] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2810.005189] env[61663]: value = "task-1690907" [ 2810.005189] env[61663]: _type = "Task" [ 2810.005189] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2810.012377] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690907, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2810.235531] env[61663]: DEBUG nova.network.neutron [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Updated VIF entry in instance network info cache for port bcdbaf8e-5ccd-4c35-8cae-fd7880177d88. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2810.235918] env[61663]: DEBUG nova.network.neutron [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Updating instance_info_cache with network_info: [{"id": "bcdbaf8e-5ccd-4c35-8cae-fd7880177d88", "address": "fa:16:3e:7f:69:2f", "network": {"id": "3ea6c793-2cad-4ae1-981e-e05421195e1f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1122825267-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67f23651373c47be8ea682898e598f3b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcdbaf8e-5c", "ovs_interfaceid": "bcdbaf8e-5ccd-4c35-8cae-fd7880177d88", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2810.245217] env[61663]: DEBUG oslo_concurrency.lockutils [req-d048855a-9252-452e-9447-4872ce1cbf34 req-dea7989c-7f0f-48a8-88d3-63c07d4d58e8 service nova] Releasing lock "refresh_cache-3930490f-586e-4bbd-aad2-1b4995ff6aa3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2810.514856] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690907, 'name': CreateVM_Task, 'duration_secs': 0.302948} completed successfully. 
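{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}

The CreateVM_Task records above and the SearchDatastore_Task records below follow the same shape: invoke an asynchronous vSphere task, then poll it ("progress is 0%.") until it reports success or error. A minimal sketch of such a poll loop, assuming a caller-supplied fetch_task_info callable and plain time.sleep instead of oslo.vmware's green-thread looping call:

import time

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll an asynchronous task until it succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()  # e.g. {'state': 'running', 'progress': 40}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # Matches the periodic "progress is N%." records in the log.
        print('progress is %s%%' % info.get('progress', 0))
        time.sleep(poll_interval)
    raise TimeoutError('task did not complete in %.0fs' % timeout)

On success the real poller also reports the elapsed time, as in the 'duration_secs': 0.302948 field of the completed CreateVM_Task record above.
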
[ 2810.514990] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2810.515644] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2810.515807] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2810.516143] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2810.516392] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7e08913-c4e3-4197-b627-bbe7b6805934 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2810.520492] env[61663]: DEBUG oslo_vmware.api [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Waiting for the task: (returnval){ [ 2810.520492] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f4d305-ab1a-bea0-6190-b70f7360b8fc" [ 2810.520492] env[61663]: _type = "Task" [ 2810.520492] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2810.527689] env[61663]: DEBUG oslo_vmware.api [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f4d305-ab1a-bea0-6190-b70f7360b8fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2811.030557] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2811.030863] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2811.030977] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c855670b-0724-4132-913e-e60ea67cf62a tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2812.687544] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.691689] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.692131] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2813.692131] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2813.714517] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.714681] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.714792] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.714919] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.715056] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.715227] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.715368] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.715510] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.715645] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.715765] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2813.715886] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. 
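{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}

The _heal_instance_info_cache pass above rebuilds its candidate list and skips every instance that is still Building, presumably because the network info cache is only worth refreshing once allocation has settled; finding nothing to do, it reports "Didn't find any instances for network info cache update." A minimal sketch of that filtering step, with the instance fields assumed for illustration:

def instances_to_heal(instances):
    """Yield instances whose network info cache should be refreshed."""
    for inst in instances:
        if inst.get('vm_state') == 'building':
            # Matches the log: instances still building are skipped.
            print('Skipping network cache update for instance %s because '
                  'it is Building.' % inst['uuid'])
            continue
        yield inst
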
[ 2813.716402] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.716590] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2813.727197] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2813.727434] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2813.727606] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2813.727761] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2813.730368] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f051d2-37b4-4826-ac92-62331a847ea5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2813.738728] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51a42ee-cea1-49b4-bda5-22fd44ed5439 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2813.752441] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd91850-7e51-4734-9b5f-b323b49605e4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2813.758609] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6112b2c6-fbf9-4619-895c-eac5f6448c06 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2813.787235] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181313MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2813.787393] env[61663]: DEBUG 
oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2813.787586] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2813.879202] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.879401] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.879565] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.879718] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.879874] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.880044] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.880204] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 911c036c-c7d8-4ff7-b874-335361fb5281 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.880351] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance bcc3a109-50ca-4a22-90f3-609231a3e95f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.880495] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b98fdfac-2912-403c-a087-46e8eaf40829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.880636] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 3930490f-586e-4bbd-aad2-1b4995ff6aa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2813.880856] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2813.881606] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2814.002515] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33947690-06e7-4ede-b952-3228e6bd04f3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2814.010358] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e717acf5-f464-41b2-a905-8634306cfa9d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2814.040560] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf4a2f6-04d8-47dc-bae0-280785cc6d4e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2814.047433] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b70566-ebc6-4322-b8d9-6e1c2cb61316 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2814.060126] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2814.069032] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider 
b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2814.083333] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2814.083522] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.296s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2815.059180] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2815.693073] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2819.692487] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2819.692826] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2830.032821] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "69194463-5c6d-4119-9c19-91b24149bd8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2830.033223] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "69194463-5c6d-4119-9c19-91b24149bd8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2854.614282] env[61663]: WARNING oslo_vmware.rw_handles [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2854.614282] env[61663]: ERROR oslo_vmware.rw_handles [ 2854.614887] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/69b2a424-69a2-4bde-912c-36ed8e7cd8f3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2854.617519] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2854.617794] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 
tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Copying Virtual Disk [datastore1] vmware_temp/69b2a424-69a2-4bde-912c-36ed8e7cd8f3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/69b2a424-69a2-4bde-912c-36ed8e7cd8f3/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2854.618149] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-efed1acf-2548-4103-a220-7ed5f3cdf68a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2854.626378] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Waiting for the task: (returnval){ [ 2854.626378] env[61663]: value = "task-1690908" [ 2854.626378] env[61663]: _type = "Task" [ 2854.626378] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2854.634274] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Task: {'id': task-1690908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2855.137062] env[61663]: DEBUG oslo_vmware.exceptions [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2855.137377] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2855.137958] env[61663]: ERROR nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2855.137958] env[61663]: Faults: ['InvalidArgument'] [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Traceback (most recent call last): [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] yield resources [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] self.driver.spawn(context, instance, image_meta, [ 2855.137958] env[61663]: ERROR 
nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] self._fetch_image_if_missing(context, vi) [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] image_cache(vi, tmp_image_ds_loc) [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] vm_util.copy_virtual_disk( [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] session._wait_for_task(vmdk_copy_task) [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] return self.wait_for_task(task_ref) [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] return evt.wait() [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] result = hub.switch() [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] return self.greenlet.switch() [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] self.f(*self.args, **self.kw) [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 
029ec7ad-96a1-42e0-a926-c1aab1de05a8] raise exceptions.translate_fault(task_info.error) [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Faults: ['InvalidArgument'] [ 2855.137958] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] [ 2855.138816] env[61663]: INFO nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Terminating instance [ 2855.139989] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2855.140229] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2855.140475] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38c28cda-84a4-4f62-a828-6f3a8d27c75c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.142705] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "refresh_cache-029ec7ad-96a1-42e0-a926-c1aab1de05a8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2855.142864] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquired lock "refresh_cache-029ec7ad-96a1-42e0-a926-c1aab1de05a8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2855.143064] env[61663]: DEBUG nova.network.neutron [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2855.149811] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2855.149987] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2855.151184] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9169d3f9-ce54-471d-ab3a-31b7ffaf0b4e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.158264] env[61663]: DEBUG oslo_vmware.api [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Waiting for the task: (returnval){ [ 2855.158264] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5275b745-fb33-dae1-f2aa-40062701e613" [ 2855.158264] env[61663]: _type = "Task" [ 2855.158264] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2855.167097] env[61663]: DEBUG oslo_vmware.api [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5275b745-fb33-dae1-f2aa-40062701e613, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2855.173238] env[61663]: DEBUG nova.network.neutron [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2855.235016] env[61663]: DEBUG nova.network.neutron [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2855.243440] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Releasing lock "refresh_cache-029ec7ad-96a1-42e0-a926-c1aab1de05a8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2855.243843] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2855.244051] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2855.245109] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd7cc04-ccac-48a6-bc04-6f1a5a944388 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.252797] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2855.253044] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-186501c2-8194-4392-9f52-6873893408c8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.281426] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2855.281635] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2855.281815] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Deleting the datastore file [datastore1] 029ec7ad-96a1-42e0-a926-c1aab1de05a8 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2855.282061] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff103955-2f4f-40d6-bd34-aacbcd69e991 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.288129] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Waiting for the task: (returnval){ [ 2855.288129] env[61663]: value = "task-1690910" [ 2855.288129] env[61663]: _type = "Task" [ 2855.288129] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2855.295155] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Task: {'id': task-1690910, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2855.668606] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2855.668945] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Creating directory with path [datastore1] vmware_temp/0db372ba-6214-4d6c-a898-f78885e14300/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2855.669116] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a1cb7ca-8a80-4581-b543-902ccca22d52 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.680424] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Created directory with path [datastore1] vmware_temp/0db372ba-6214-4d6c-a898-f78885e14300/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2855.680617] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Fetch image to [datastore1] vmware_temp/0db372ba-6214-4d6c-a898-f78885e14300/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2855.680789] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/0db372ba-6214-4d6c-a898-f78885e14300/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2855.681523] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b29ee69-f029-4e3c-b105-64489f72ec71 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.688155] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b182688c-d162-4fab-a8ce-0ad6379bc89d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.696837] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a0e740-43fd-4f61-8b2a-5f5bb6ef2825 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.726593] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59293c8e-2a4c-4e5f-91cf-5ba1081b4773 {{(pid=61663) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.732071] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f9936fea-f2bd-4b99-ba8c-90d93e3cc26a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2855.751247] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2855.796972] env[61663]: DEBUG oslo_vmware.api [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Task: {'id': task-1690910, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.044766} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2855.797197] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2855.797389] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2855.797566] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2855.798067] env[61663]: INFO nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Took 0.55 seconds to destroy the instance on the hypervisor. [ 2855.798067] env[61663]: DEBUG oslo.service.loopingcall [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2855.798226] env[61663]: DEBUG nova.compute.manager [-] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2855.800254] env[61663]: DEBUG nova.compute.claims [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2855.800422] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2855.800633] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2855.921605] env[61663]: DEBUG oslo_vmware.rw_handles [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0db372ba-6214-4d6c-a898-f78885e14300/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2855.985518] env[61663]: DEBUG oslo_vmware.rw_handles [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2855.985722] env[61663]: DEBUG oslo_vmware.rw_handles [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0db372ba-6214-4d6c-a898-f78885e14300/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2856.037879] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3223787-de3c-4090-acf2-d58f329cdfcc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.045376] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aacbed65-2164-4ff8-a5cc-37e7d6c8179b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.074352] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3948c071-a5f6-415e-9be5-bc9fcb4627cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.081132] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131d0912-9b36-4e8c-9081-2835a1201004 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.093693] env[61663]: DEBUG nova.compute.provider_tree [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2856.101662] env[61663]: DEBUG nova.scheduler.client.report [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2856.119816] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.319s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2856.120366] env[61663]: ERROR nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2856.120366] env[61663]: Faults: ['InvalidArgument'] [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Traceback (most recent call last): [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 
029ec7ad-96a1-42e0-a926-c1aab1de05a8] self.driver.spawn(context, instance, image_meta, [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] self._fetch_image_if_missing(context, vi) [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] image_cache(vi, tmp_image_ds_loc) [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] vm_util.copy_virtual_disk( [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] session._wait_for_task(vmdk_copy_task) [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] return self.wait_for_task(task_ref) [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] return evt.wait() [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] result = hub.switch() [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] return self.greenlet.switch() [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] self.f(*self.args, **self.kw) [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] raise exceptions.translate_fault(task_info.error) [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Faults: ['InvalidArgument'] [ 2856.120366] env[61663]: ERROR nova.compute.manager [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] [ 2856.121094] env[61663]: DEBUG nova.compute.utils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2856.122456] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Build of instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 was re-scheduled: A specified parameter was not correct: fileType [ 2856.122456] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2856.122829] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2856.123067] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "refresh_cache-029ec7ad-96a1-42e0-a926-c1aab1de05a8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2856.123229] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquired lock "refresh_cache-029ec7ad-96a1-42e0-a926-c1aab1de05a8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2856.123393] env[61663]: DEBUG nova.network.neutron [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2856.146949] env[61663]: DEBUG nova.network.neutron [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2856.203983] env[61663]: DEBUG nova.network.neutron [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2856.211999] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Releasing lock "refresh_cache-029ec7ad-96a1-42e0-a926-c1aab1de05a8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2856.212223] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2856.212405] env[61663]: DEBUG nova.compute.manager [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Skipping network deallocation for instance since networking was not requested. {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 2856.297304] env[61663]: INFO nova.scheduler.client.report [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Deleted allocations for instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 [ 2856.316062] env[61663]: DEBUG oslo_concurrency.lockutils [None req-051f07db-a5d9-4515-8585-41c8eb0d4381 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 594.720s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2856.316806] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 440.229s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2856.317024] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] During sync_power_state the instance has a pending task (spawning). Skip. 
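[annotation] The traceback repeated above bottoms out in oslo_vmware's task polling: the poller reads the vCenter task's info, and once the task reaches the error state it re-raises the server-side fault on the client, which is where "VimFaultException: A specified parameter was not correct: fileType / Faults: ['InvalidArgument']" comes from. A minimal sketch of that polling pattern follows; the names and fields (poll_task_info, .state, .error) are assumptions for illustration, not oslo_vmware's actual API.

    # Minimal sketch of the loop behind the "Waiting for the task ... to
    # complete" and "progress is 0%" lines above. Illustrative only.
    import time

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list  # e.g. ['InvalidArgument']

    def wait_for_task(poll_task_info, interval=0.5):
        """poll_task_info() returns an object with .state and .error."""
        while True:
            info = poll_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # The step visible in the traceback: translate the
                # server-side fault into a client-side exception and raise.
                raise VimFaultException(getattr(info.error, 'faults', []),
                                        str(info.error))
            time.sleep(interval)  # 'queued'/'running': poll again

In the real service this loop runs inside an oslo.service looping call on an eventlet hub, which is why the traceback passes through loopingcall.py and hub.switch() before reaching _poll_task.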
[ 2856.317218] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2856.317799] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 399.014s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2856.318057] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2856.318602] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2856.318602] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2856.320373] env[61663]: INFO nova.compute.manager [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Terminating instance [ 2856.321895] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquiring lock "refresh_cache-029ec7ad-96a1-42e0-a926-c1aab1de05a8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2856.322070] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Acquired lock "refresh_cache-029ec7ad-96a1-42e0-a926-c1aab1de05a8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2856.322244] env[61663]: DEBUG nova.network.neutron [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Building network info cache for instance {{(pid=61663) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 2856.330903] env[61663]: DEBUG nova.compute.manager [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2856.350712] env[61663]: DEBUG nova.network.neutron [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2856.384599] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2856.384887] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2856.386349] env[61663]: INFO nova.compute.claims [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2856.422498] env[61663]: DEBUG nova.network.neutron [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2856.430024] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Releasing lock "refresh_cache-029ec7ad-96a1-42e0-a926-c1aab1de05a8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2856.430459] env[61663]: DEBUG nova.compute.manager [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2856.430681] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2856.431454] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2eb58ea1-7df3-45d1-b38d-88ee572e1817 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.443399] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbab0a91-c494-40f2-ab1e-976e23fb74ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.473707] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 029ec7ad-96a1-42e0-a926-c1aab1de05a8 could not be found. [ 2856.473911] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2856.474108] env[61663]: INFO nova.compute.manager [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2856.474353] env[61663]: DEBUG oslo.service.loopingcall [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2856.476750] env[61663]: DEBUG nova.compute.manager [-] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2856.476871] env[61663]: DEBUG nova.network.neutron [-] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2856.499011] env[61663]: DEBUG nova.network.neutron [-] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2856.510360] env[61663]: DEBUG nova.network.neutron [-] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2856.521597] env[61663]: INFO nova.compute.manager [-] [instance: 029ec7ad-96a1-42e0-a926-c1aab1de05a8] Took 0.04 seconds to deallocate network for instance. 
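[annotation] Just above, the second terminate attempt finds the VM already gone from vCenter ("Instance does not exist on backend: nova.exception.InstanceNotFound") yet still logs "Instance destroyed" and proceeds to network deallocation. A rough sketch of that tolerant destroy pattern; the backend object and its helpers (lookup_vm, unregister_vm, delete_datastore_files) are hypothetical stand-ins for the vmops/ds_util calls seen in the log.

    # Sketch: treat a missing backend VM as already destroyed, as in the
    # WARNING above, so cleanup can always continue.
    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        pass

    def destroy_instance(backend, instance_uuid):
        try:
            vm_ref = backend.lookup_vm(instance_uuid)  # may raise InstanceNotFound
            backend.unregister_vm(vm_ref)              # cf. UnregisterVM
            backend.delete_datastore_files(instance_uuid)  # cf. DeleteDatastoreFile_Task
        except InstanceNotFound:
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        # Either way the instance is gone from the hypervisor's perspective,
        # so the caller can continue with network deallocation and
        # resource-claim cleanup.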
[ 2856.570155] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdf30b5-bd16-4425-b9c8-a32f25c37c14 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.577507] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18471e36-9f1e-4cd6-ac7e-04dd175495f0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.610870] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d006f7-b7f3-4371-a090-c5de47bc3dbb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.613429] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5cbc923e-af56-4134-9e31-e3b6ac9f1970 tempest-ServerShowV257Test-812361262 tempest-ServerShowV257Test-812361262-project-member] Lock "029ec7ad-96a1-42e0-a926-c1aab1de05a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.296s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2856.619332] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed067d08-3bf0-4954-b347-c1ecce18584b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.632622] env[61663]: DEBUG nova.compute.provider_tree [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2856.640027] env[61663]: DEBUG nova.scheduler.client.report [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2856.650885] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.266s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2856.651312] env[61663]: DEBUG nova.compute.manager [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2856.681773] env[61663]: DEBUG nova.compute.utils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2856.683093] env[61663]: DEBUG nova.compute.manager [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2856.683271] env[61663]: DEBUG nova.network.neutron [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2856.690652] env[61663]: DEBUG nova.compute.manager [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2856.736735] env[61663]: DEBUG nova.policy [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '033e5ebd18fb421b8ad3f4ad5033f1b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7bb1bdc9b1004ff591ab4e001d81b400', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2856.749993] env[61663]: DEBUG nova.compute.manager [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2856.774180] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2856.774425] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2856.774583] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2856.774764] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2856.774939] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2856.775115] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2856.775327] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2856.775489] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2856.775689] 
env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2856.775825] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2856.776029] env[61663]: DEBUG nova.virt.hardware [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2856.776870] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e49e85-dcb9-44cf-b2e4-44542e8ac2b4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2856.784543] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e21a50d-4716-4cb7-a9cb-d36bb517cb8b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2857.132993] env[61663]: DEBUG nova.network.neutron [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Successfully created port: 8b0b6a02-f13e-4f14-80bf-c54c5636ece6 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2857.771446] env[61663]: DEBUG nova.compute.manager [req-9e47cd33-4d19-4aea-a612-7e025b59b41e req-1772080a-99a0-489c-a639-d17ce2673e9e service nova] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Received event network-vif-plugged-8b0b6a02-f13e-4f14-80bf-c54c5636ece6 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2857.771446] env[61663]: DEBUG oslo_concurrency.lockutils [req-9e47cd33-4d19-4aea-a612-7e025b59b41e req-1772080a-99a0-489c-a639-d17ce2673e9e service nova] Acquiring lock "69194463-5c6d-4119-9c19-91b24149bd8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2857.771697] env[61663]: DEBUG oslo_concurrency.lockutils [req-9e47cd33-4d19-4aea-a612-7e025b59b41e req-1772080a-99a0-489c-a639-d17ce2673e9e service nova] Lock "69194463-5c6d-4119-9c19-91b24149bd8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2857.772692] env[61663]: DEBUG oslo_concurrency.lockutils [req-9e47cd33-4d19-4aea-a612-7e025b59b41e req-1772080a-99a0-489c-a639-d17ce2673e9e service nova] Lock "69194463-5c6d-4119-9c19-91b24149bd8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2857.772692] env[61663]: DEBUG nova.compute.manager [req-9e47cd33-4d19-4aea-a612-7e025b59b41e req-1772080a-99a0-489c-a639-d17ce2673e9e service nova] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] No waiting events found dispatching network-vif-plugged-8b0b6a02-f13e-4f14-80bf-c54c5636ece6 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2857.772692] env[61663]: WARNING nova.compute.manager [req-9e47cd33-4d19-4aea-a612-7e025b59b41e req-1772080a-99a0-489c-a639-d17ce2673e9e service nova] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Received unexpected event network-vif-plugged-8b0b6a02-f13e-4f14-80bf-c54c5636ece6 for instance with vm_state building and task_state spawning. [ 2857.852101] env[61663]: DEBUG nova.network.neutron [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Successfully updated port: 8b0b6a02-f13e-4f14-80bf-c54c5636ece6 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2857.863674] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "refresh_cache-69194463-5c6d-4119-9c19-91b24149bd8b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2857.863830] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "refresh_cache-69194463-5c6d-4119-9c19-91b24149bd8b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2857.863979] env[61663]: DEBUG nova.network.neutron [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2857.904367] env[61663]: DEBUG nova.network.neutron [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2858.355289] env[61663]: DEBUG nova.network.neutron [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Updating instance_info_cache with network_info: [{"id": "8b0b6a02-f13e-4f14-80bf-c54c5636ece6", "address": "fa:16:3e:32:f4:e5", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0b6a02-f1", "ovs_interfaceid": "8b0b6a02-f13e-4f14-80bf-c54c5636ece6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2858.370048] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "refresh_cache-69194463-5c6d-4119-9c19-91b24149bd8b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2858.370048] env[61663]: DEBUG nova.compute.manager [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Instance network_info: |[{"id": "8b0b6a02-f13e-4f14-80bf-c54c5636ece6", "address": "fa:16:3e:32:f4:e5", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0b6a02-f1", "ovs_interfaceid": "8b0b6a02-f13e-4f14-80bf-c54c5636ece6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 2858.370374] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:f4:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa09e855-8af1-419b-b78d-8ffcc94b1bfb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b0b6a02-f13e-4f14-80bf-c54c5636ece6', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2858.378042] env[61663]: DEBUG oslo.service.loopingcall [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2858.378517] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2858.378791] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8fa1750-bf28-4bab-b03c-7a9ba7231bd1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2858.399414] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2858.399414] env[61663]: value = "task-1690911" [ 2858.399414] env[61663]: _type = "Task" [ 2858.399414] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2858.409306] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690911, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2858.911987] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690911, 'name': CreateVM_Task, 'duration_secs': 0.294498} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2858.912385] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2858.912839] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2858.913018] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2858.913363] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2858.913611] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d032ebd-81ae-4c75-8245-370f2bc1e735 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2858.918147] env[61663]: DEBUG oslo_vmware.api [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 2858.918147] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c04cb4-26df-57c7-67e7-f1ec905040e3" [ 2858.918147] env[61663]: _type = "Task" [ 2858.918147] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2858.926029] env[61663]: DEBUG oslo_vmware.api [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c04cb4-26df-57c7-67e7-f1ec905040e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2859.429770] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2859.430072] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2859.430251] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c1e01d76-1655-44a4-8e2f-6465b47cb943 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2859.798040] env[61663]: DEBUG nova.compute.manager [req-9b93836d-a283-4dd0-9b60-e25b2e224133 req-29def895-d36d-4350-b4ee-d95891824fbb service nova] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Received event network-changed-8b0b6a02-f13e-4f14-80bf-c54c5636ece6 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2859.798261] env[61663]: DEBUG nova.compute.manager [req-9b93836d-a283-4dd0-9b60-e25b2e224133 req-29def895-d36d-4350-b4ee-d95891824fbb service nova] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Refreshing instance network info cache due to event network-changed-8b0b6a02-f13e-4f14-80bf-c54c5636ece6. {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2859.798480] env[61663]: DEBUG oslo_concurrency.lockutils [req-9b93836d-a283-4dd0-9b60-e25b2e224133 req-29def895-d36d-4350-b4ee-d95891824fbb service nova] Acquiring lock "refresh_cache-69194463-5c6d-4119-9c19-91b24149bd8b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2859.798628] env[61663]: DEBUG oslo_concurrency.lockutils [req-9b93836d-a283-4dd0-9b60-e25b2e224133 req-29def895-d36d-4350-b4ee-d95891824fbb service nova] Acquired lock "refresh_cache-69194463-5c6d-4119-9c19-91b24149bd8b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2859.798791] env[61663]: DEBUG nova.network.neutron [req-9b93836d-a283-4dd0-9b60-e25b2e224133 req-29def895-d36d-4350-b4ee-d95891824fbb service nova] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Refreshing network info cache for port 8b0b6a02-f13e-4f14-80bf-c54c5636ece6 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2860.081750] env[61663]: DEBUG nova.network.neutron [req-9b93836d-a283-4dd0-9b60-e25b2e224133 req-29def895-d36d-4350-b4ee-d95891824fbb service nova] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Updated VIF entry in instance network info cache for port 8b0b6a02-f13e-4f14-80bf-c54c5636ece6. 
{{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2860.082183] env[61663]: DEBUG nova.network.neutron [req-9b93836d-a283-4dd0-9b60-e25b2e224133 req-29def895-d36d-4350-b4ee-d95891824fbb service nova] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Updating instance_info_cache with network_info: [{"id": "8b0b6a02-f13e-4f14-80bf-c54c5636ece6", "address": "fa:16:3e:32:f4:e5", "network": {"id": "063a4d32-b405-433b-a1a0-ce9820683878", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-673894266-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7bb1bdc9b1004ff591ab4e001d81b400", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa09e855-8af1-419b-b78d-8ffcc94b1bfb", "external-id": "nsx-vlan-transportzone-901", "segmentation_id": 901, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b0b6a02-f1", "ovs_interfaceid": "8b0b6a02-f13e-4f14-80bf-c54c5636ece6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2860.091806] env[61663]: DEBUG oslo_concurrency.lockutils [req-9b93836d-a283-4dd0-9b60-e25b2e224133 req-29def895-d36d-4350-b4ee-d95891824fbb service nova] Releasing lock "refresh_cache-69194463-5c6d-4119-9c19-91b24149bd8b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2861.692900] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2866.692212] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2872.469880] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c51c9888-e10c-48e9-9257-513b09e2175d tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "b98fdfac-2912-403c-a087-46e8eaf40829" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2873.687254] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2874.692423] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2875.692617] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2875.692932] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2875.692932] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2875.713777] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.713953] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.714083] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.714214] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.714339] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.714461] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.714582] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.714702] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.714820] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.714939] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2875.715077] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2875.715609] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2875.715779] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2875.715936] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2875.726508] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2875.726726] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2875.726893] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2875.727057] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2875.728132] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68b9bbd-e557-4afc-9e8c-3eea274d777d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2875.737055] env[61663]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3a0afa-fa45-4616-9ca8-bce726ed3a84 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2875.750547] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42c5598-0e6f-457d-b1d8-3def4465637d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2875.756718] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c657dd-a9ba-45da-aec5-55c0050add5a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2875.787529] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181279MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2875.787683] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2875.787875] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2875.859912] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.860088] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.860223] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.860350] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.860475] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.860583] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 911c036c-c7d8-4ff7-b874-335361fb5281 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.860696] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance bcc3a109-50ca-4a22-90f3-609231a3e95f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.860813] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b98fdfac-2912-403c-a087-46e8eaf40829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.860929] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 3930490f-586e-4bbd-aad2-1b4995ff6aa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.861062] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 69194463-5c6d-4119-9c19-91b24149bd8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2875.861258] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2875.861397] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2875.972008] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ca9277-9ee4-4663-864c-44e29480f9d7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2875.979574] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9be2f95-4273-4758-8c56-c20a5811ff40 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2876.008137] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac58356-e24e-4194-b35a-5f943ea3e7e0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2876.014545] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b48f555-22e1-4b0e-afba-bef95c153029 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2876.028474] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2876.037757] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2876.050974] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2876.051174] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.263s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2882.028972] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2882.029504] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2883.153856] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "b84c5391-c337-4e45-823d-5779df22a116" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2883.154140] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "b84c5391-c337-4e45-823d-5779df22a116" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2888.687601] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2904.631290] env[61663]: WARNING oslo_vmware.rw_handles [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2904.631290] env[61663]: ERROR oslo_vmware.rw_handles [ 2904.631946] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to 
vmware_temp/0db372ba-6214-4d6c-a898-f78885e14300/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2904.633726] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2904.633965] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Copying Virtual Disk [datastore1] vmware_temp/0db372ba-6214-4d6c-a898-f78885e14300/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/0db372ba-6214-4d6c-a898-f78885e14300/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2904.634300] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-233f7191-005a-4e66-8f95-01fe53e6752a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2904.642517] env[61663]: DEBUG oslo_vmware.api [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Waiting for the task: (returnval){ [ 2904.642517] env[61663]: value = "task-1690912" [ 2904.642517] env[61663]: _type = "Task" [ 2904.642517] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2904.650454] env[61663]: DEBUG oslo_vmware.api [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Task: {'id': task-1690912, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2905.153351] env[61663]: DEBUG oslo_vmware.exceptions [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2905.153628] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2905.154231] env[61663]: ERROR nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2905.154231] env[61663]: Faults: ['InvalidArgument'] [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Traceback (most recent call last): [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] yield resources [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] self.driver.spawn(context, instance, image_meta, [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] self._fetch_image_if_missing(context, vi) [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] image_cache(vi, tmp_image_ds_loc) [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] vm_util.copy_virtual_disk( [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] session._wait_for_task(vmdk_copy_task) [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] return self.wait_for_task(task_ref) [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] return evt.wait() [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] result = hub.switch() [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] return self.greenlet.switch() [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] self.f(*self.args, **self.kw) [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] raise exceptions.translate_fault(task_info.error) [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Faults: ['InvalidArgument'] [ 2905.154231] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] [ 2905.155155] env[61663]: INFO nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Terminating instance [ 2905.156817] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2905.156817] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2905.156817] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b097eca2-ccd2-496a-b969-0c27bd78bcd0 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.158673] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2905.158877] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2905.159586] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda02f1f-85b0-42df-9b51-2e8bf17e7995 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.166031] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2905.166233] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d58a596-b145-4285-ae8a-6e312305ecab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.168335] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2905.168508] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2905.169442] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe80226d-1e57-491d-8e4b-c9755d75654e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.173920] env[61663]: DEBUG oslo_vmware.api [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for the task: (returnval){ [ 2905.173920] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52db1885-c253-b4b1-dc89-644f26c7384e" [ 2905.173920] env[61663]: _type = "Task" [ 2905.173920] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2905.180784] env[61663]: DEBUG oslo_vmware.api [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52db1885-c253-b4b1-dc89-644f26c7384e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2905.234196] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2905.234423] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2905.234605] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Deleting the datastore file [datastore1] 0adee33d-8d0c-4bcf-8df4-11465be00485 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2905.234866] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f67ebd0-82e3-4181-af87-1d02ad1fb408 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.240583] env[61663]: DEBUG oslo_vmware.api [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Waiting for the task: (returnval){ [ 2905.240583] env[61663]: value = "task-1690914" [ 2905.240583] env[61663]: _type = "Task" [ 2905.240583] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2905.247808] env[61663]: DEBUG oslo_vmware.api [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Task: {'id': task-1690914, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2905.684480] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2905.684810] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Creating directory with path [datastore1] vmware_temp/e37970ce-d8b3-46d2-8d02-ba0353baf4ff/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2905.684912] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-135d3f9a-8b22-41e8-8ee8-0304c06ccb42 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.701331] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Created directory with path [datastore1] vmware_temp/e37970ce-d8b3-46d2-8d02-ba0353baf4ff/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2905.701520] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Fetch image to [datastore1] vmware_temp/e37970ce-d8b3-46d2-8d02-ba0353baf4ff/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2905.701690] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/e37970ce-d8b3-46d2-8d02-ba0353baf4ff/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2905.702426] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cb9e02-abf9-45b2-a223-6968a455cc5c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.708846] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21d5432-8e6d-4612-88a9-855fbb3eac55 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.717484] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6e8fbe-c8d4-4b65-8590-981afb009215 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.749276] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e5be78-e3b6-4242-a1b9-878a0125ccfd 
{{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.755851] env[61663]: DEBUG oslo_vmware.api [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Task: {'id': task-1690914, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080604} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2905.757285] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2905.757477] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2905.757654] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2905.757828] env[61663]: INFO nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Took 0.60 seconds to destroy the instance on the hypervisor. 
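[Editor's note] The destroy sequence just above (VirtualMachine.UnregisterVM, then FileManager.DeleteDatastoreFile_Task polled via wait_for_task) and the earlier CreateVM_Task/CopyVirtualDisk_Task entries all follow oslo.vmware's invoke-then-wait pattern. The sketch below is a minimal illustration of that pattern, not code from this log: the host, credentials, dc_ref, and datastore path are placeholder assumptions, and a real run needs a reachable vCenter.

    # A minimal sketch of the oslo.vmware invoke-then-wait pattern behind the
    # task entries above. Host, credentials, dc_ref and the path are placeholders.
    from oslo_vmware import api

    # Constructing the session performs the SessionManager.Login call that
    # appears at the top of this log.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    dc_ref = None  # placeholder: the Datacenter managed-object reference

    # invoke_api() issues the SOAP request and returns a task reference;
    # wait_for_task() then polls task.info (the "progress is 0%" lines) until
    # the task reaches SUCCESS, or raises a translated VimFaultException --
    # which is how the CopyVirtualDisk_Task 'InvalidArgument: fileType' fault
    # above surfaces in Nova.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] 0adee33d-8d0c-4bcf-8df4-11465be00485',
        datacenter=dc_ref)
    session.wait_for_task(task)
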
[ 2905.759597] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8743d26c-c9d1-47a0-a8c4-f52e27255260 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2905.761423] env[61663]: DEBUG nova.compute.claims [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2905.761599] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2905.761808] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2905.784883] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2905.835480] env[61663]: DEBUG oslo_vmware.rw_handles [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e37970ce-d8b3-46d2-8d02-ba0353baf4ff/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2905.899012] env[61663]: DEBUG oslo_vmware.rw_handles [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2905.899235] env[61663]: DEBUG oslo_vmware.rw_handles [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e37970ce-d8b3-46d2-8d02-ba0353baf4ff/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2905.992786] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a51e4d-a9b2-4599-af0b-9ed701e8674a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.000320] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9385e7b-28e2-4c21-967d-f92d605469b4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.029939] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb246be5-cfce-40d9-985a-918fc16cf929 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.036355] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990850e9-2832-44ad-bc5b-a2e21f865136 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2906.048705] env[61663]: DEBUG nova.compute.provider_tree [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2906.057313] env[61663]: DEBUG nova.scheduler.client.report [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2906.070588] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.309s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2906.071114] env[61663]: ERROR nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2906.071114] env[61663]: Faults: ['InvalidArgument']
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Traceback (most recent call last):
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] self.driver.spawn(context, instance, image_meta,
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] self._fetch_image_if_missing(context, vi)
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] image_cache(vi, tmp_image_ds_loc)
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] vm_util.copy_virtual_disk(
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] session._wait_for_task(vmdk_copy_task)
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] return self.wait_for_task(task_ref)
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] return evt.wait()
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] result = hub.switch()
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] return self.greenlet.switch()
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] self.f(*self.args, **self.kw)
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] raise exceptions.translate_fault(task_info.error)
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Faults: ['InvalidArgument']
[ 2906.071114] env[61663]: ERROR nova.compute.manager [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485]
[ 2906.071884] env[61663]: DEBUG nova.compute.utils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2906.073213] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Build of instance 0adee33d-8d0c-4bcf-8df4-11465be00485 was re-scheduled: A specified parameter was not correct: fileType
[ 2906.073213] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2906.073586] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2906.073758] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2906.073930] env[61663]: DEBUG nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2906.074112] env[61663]: DEBUG nova.network.neutron [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2906.382510] env[61663]: DEBUG nova.network.neutron [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2906.393043] env[61663]: INFO nova.compute.manager [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Took 0.32 seconds to deallocate network for instance.
[ 2906.489016] env[61663]: INFO nova.scheduler.client.report [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Deleted allocations for instance 0adee33d-8d0c-4bcf-8df4-11465be00485
[ 2906.513570] env[61663]: DEBUG oslo_concurrency.lockutils [None req-51df8c92-8224-4612-897f-d99a565d4f34 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 638.797s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2906.514544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 490.427s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2906.514739] env[61663]: INFO nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] During sync_power_state the instance has a pending task (spawning). Skip.
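The traceback above shows the poll-and-translate path that every vCenter task in this log takes: vm_util.copy_virtual_disk submits a CopyVirtualDisk_Task, session._wait_for_task parks the greenthread on an eventlet event, and oslo.vmware's _poll_task loop wakes it with either the task result or an exception built from task_info.error via translate_fault. Below is a minimal sketch of that pattern in plain blocking Python rather than eventlet (this is not oslo.vmware's actual code); the session.get_task_info helper and the attribute names on task_info are assumptions standing in for the real PropertyCollector round trips:

import time

class VimFaultException(Exception):
    # Carries the fault names that show up as Faults: [...] in the log.
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

def wait_for_task(session, task_ref, poll_interval=0.5):
    # VMware TaskInfo.state is one of 'queued', 'running', 'success', 'error'.
    while True:
        task_info = session.get_task_info(task_ref)  # assumed helper
        if task_info.state == 'success':
            return task_info.result
        if task_info.state == 'error':
            # Mirrors raise exceptions.translate_fault(task_info.error); in
            # the failure above fault_list would be ['InvalidArgument'] and
            # message "A specified parameter was not correct: fileType".
            raise VimFaultException(task_info.fault_list, task_info.message)
        time.sleep(poll_interval)  # still 'queued'/'running': keep polling

Once that exception propagates up to _build_and_run_instance, the build is abandoned and re-scheduled, which is exactly what the surrounding records show.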
[ 2906.514917] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2906.515563] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 443.279s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2906.515935] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Acquiring lock "0adee33d-8d0c-4bcf-8df4-11465be00485-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2906.516058] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2906.516231] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2906.518388] env[61663]: INFO nova.compute.manager [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Terminating instance [ 2906.520220] env[61663]: DEBUG nova.compute.manager [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2906.520414] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2906.520662] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a08b3047-9ceb-47f5-bb29-5b16067f7a6e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2906.526259] env[61663]: DEBUG nova.compute.manager [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2906.534871] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a82b62-2d16-482d-8fa6-a070cb84f597 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2906.567134] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0adee33d-8d0c-4bcf-8df4-11465be00485 could not be found. [ 2906.567134] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2906.567217] env[61663]: INFO nova.compute.manager [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2906.567457] env[61663]: DEBUG oslo.service.loopingcall [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2906.569848] env[61663]: DEBUG nova.compute.manager [-] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2906.569848] env[61663]: DEBUG nova.network.neutron [-] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2906.583054] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2906.583294] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2906.584712] env[61663]: INFO nova.compute.claims [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2906.600539] env[61663]: DEBUG nova.network.neutron [-] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2906.627836] env[61663]: INFO nova.compute.manager [-] [instance: 0adee33d-8d0c-4bcf-8df4-11465be00485] Took 0.06 seconds to deallocate network for instance. 
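The inventory dict that nova.scheduler.client.report keeps logging as unchanged (above, and again just below for the new claim) is what placement admits allocations against: per resource class the usable capacity is (total - reserved) * allocation_ratio, while max_unit caps any single allocation (16 VCPU, 65530 MB, 183 GB here). A small sketch over the values exactly as they appear in these records:

# Inventory copied verbatim from the 'Inventory has not changed' records.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    # Placement's admission rule per resource class:
    # used + requested <= (total - reserved) * allocation_ratio
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

With allocation_ratio 4.0 the node's 48 physical vCPUs admit up to 192 allocated VCPUs, which is why the resource tracker further down can report 10 of 48 vCPUs allocated with no scheduling pressure.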
[ 2906.719616] env[61663]: DEBUG oslo_concurrency.lockutils [None req-7239e18d-2df9-430d-b37a-034ece427667 tempest-AttachVolumeTestJSON-1206770338 tempest-AttachVolumeTestJSON-1206770338-project-member] Lock "0adee33d-8d0c-4bcf-8df4-11465be00485" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.204s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2906.774107] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086d3748-48ea-4511-a656-c92d00d3400a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2906.781239] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17a7c86-4ba9-4ab6-a440-37fb7713b885 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2906.810257] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431a268a-448e-4732-8e4a-8cfa875bf88a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2906.816781] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ce3eca-cd86-4bfe-9be0-e33251dca3cd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2906.829054] env[61663]: DEBUG nova.compute.provider_tree [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2906.837485] env[61663]: DEBUG nova.scheduler.client.report [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2906.850751] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.267s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2906.851225] env[61663]: DEBUG nova.compute.manager [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2906.880947] env[61663]: DEBUG nova.compute.utils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2906.882330] env[61663]: DEBUG nova.compute.manager [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2906.882525] env[61663]: DEBUG nova.network.neutron [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2906.890686] env[61663]: DEBUG nova.compute.manager [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2906.938416] env[61663]: DEBUG nova.policy [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '23af862ab660499ab02b71d7cbbe87a7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '231acc431e92432795932c50511f2944', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 2906.951667] env[61663]: DEBUG nova.compute.manager [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Start spawning the instance on the hypervisor. 
{{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2906.976808] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2906.977068] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2906.977234] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2906.977420] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2906.977568] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2906.977718] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2906.977925] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2906.978107] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2906.978277] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 
tempest-ServersTestJSON-1545653383-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2906.978458] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2906.978616] env[61663]: DEBUG nova.virt.hardware [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2906.979571] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ba59d2-be85-4cd9-8d9c-3350dd6892ee {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2906.987395] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8006950-3835-4fb8-940f-582b3dcef62a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2907.303665] env[61663]: DEBUG nova.network.neutron [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Successfully created port: a99776a9-9a65-4534-9d45-34938f82bd4c {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2908.100480] env[61663]: DEBUG nova.network.neutron [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Successfully updated port: a99776a9-9a65-4534-9d45-34938f82bd4c {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2908.114194] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "refresh_cache-b84c5391-c337-4e45-823d-5779df22a116" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2908.114347] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired lock "refresh_cache-b84c5391-c337-4e45-823d-5779df22a116" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2908.114500] env[61663]: DEBUG nova.network.neutron [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2908.152454] env[61663]: DEBUG nova.network.neutron [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Instance cache missing network info. 
{{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2908.350444] env[61663]: DEBUG nova.network.neutron [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Updating instance_info_cache with network_info: [{"id": "a99776a9-9a65-4534-9d45-34938f82bd4c", "address": "fa:16:3e:51:96:8e", "network": {"id": "c74991f7-41c9-42d7-9978-5fba7e2b62af", "bridge": "br-int", "label": "tempest-ServersTestJSON-1185630305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "231acc431e92432795932c50511f2944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa99776a9-9a", "ovs_interfaceid": "a99776a9-9a65-4534-9d45-34938f82bd4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2908.361416] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Releasing lock "refresh_cache-b84c5391-c337-4e45-823d-5779df22a116" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2908.361687] env[61663]: DEBUG nova.compute.manager [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Instance network_info: |[{"id": "a99776a9-9a65-4534-9d45-34938f82bd4c", "address": "fa:16:3e:51:96:8e", "network": {"id": "c74991f7-41c9-42d7-9978-5fba7e2b62af", "bridge": "br-int", "label": "tempest-ServersTestJSON-1185630305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "231acc431e92432795932c50511f2944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa99776a9-9a", "ovs_interfaceid": "a99776a9-9a65-4534-9d45-34938f82bd4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2908.362094] 
env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:96:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a99776a9-9a65-4534-9d45-34938f82bd4c', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2908.373359] env[61663]: DEBUG oslo.service.loopingcall [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2908.373945] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b84c5391-c337-4e45-823d-5779df22a116] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2908.374251] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f1015da-3030-4246-b2f7-16d24f86ea7a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2908.403628] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2908.403628] env[61663]: value = "task-1690915" [ 2908.403628] env[61663]: _type = "Task" [ 2908.403628] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2908.414155] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690915, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2908.449736] env[61663]: DEBUG nova.compute.manager [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] [instance: b84c5391-c337-4e45-823d-5779df22a116] Received event network-vif-plugged-a99776a9-9a65-4534-9d45-34938f82bd4c {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2908.449831] env[61663]: DEBUG oslo_concurrency.lockutils [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] Acquiring lock "b84c5391-c337-4e45-823d-5779df22a116-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2908.450028] env[61663]: DEBUG oslo_concurrency.lockutils [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] Lock "b84c5391-c337-4e45-823d-5779df22a116-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2908.450199] env[61663]: DEBUG oslo_concurrency.lockutils [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] Lock "b84c5391-c337-4e45-823d-5779df22a116-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2908.450370] env[61663]: DEBUG nova.compute.manager [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] [instance: b84c5391-c337-4e45-823d-5779df22a116] No waiting events found dispatching network-vif-plugged-a99776a9-9a65-4534-9d45-34938f82bd4c {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2908.450534] env[61663]: WARNING nova.compute.manager [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] [instance: b84c5391-c337-4e45-823d-5779df22a116] Received unexpected event network-vif-plugged-a99776a9-9a65-4534-9d45-34938f82bd4c for instance with vm_state building and task_state spawning. [ 2908.450696] env[61663]: DEBUG nova.compute.manager [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] [instance: b84c5391-c337-4e45-823d-5779df22a116] Received event network-changed-a99776a9-9a65-4534-9d45-34938f82bd4c {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2908.450851] env[61663]: DEBUG nova.compute.manager [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] [instance: b84c5391-c337-4e45-823d-5779df22a116] Refreshing instance network info cache due to event network-changed-a99776a9-9a65-4534-9d45-34938f82bd4c. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2908.451046] env[61663]: DEBUG oslo_concurrency.lockutils [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] Acquiring lock "refresh_cache-b84c5391-c337-4e45-823d-5779df22a116" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2908.451187] env[61663]: DEBUG oslo_concurrency.lockutils [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] Acquired lock "refresh_cache-b84c5391-c337-4e45-823d-5779df22a116" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2908.451341] env[61663]: DEBUG nova.network.neutron [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] [instance: b84c5391-c337-4e45-823d-5779df22a116] Refreshing network info cache for port a99776a9-9a65-4534-9d45-34938f82bd4c {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2908.768658] env[61663]: DEBUG nova.network.neutron [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] [instance: b84c5391-c337-4e45-823d-5779df22a116] Updated VIF entry in instance network info cache for port a99776a9-9a65-4534-9d45-34938f82bd4c. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2908.769032] env[61663]: DEBUG nova.network.neutron [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] [instance: b84c5391-c337-4e45-823d-5779df22a116] Updating instance_info_cache with network_info: [{"id": "a99776a9-9a65-4534-9d45-34938f82bd4c", "address": "fa:16:3e:51:96:8e", "network": {"id": "c74991f7-41c9-42d7-9978-5fba7e2b62af", "bridge": "br-int", "label": "tempest-ServersTestJSON-1185630305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "231acc431e92432795932c50511f2944", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa99776a9-9a", "ovs_interfaceid": "a99776a9-9a65-4534-9d45-34938f82bd4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2908.778277] env[61663]: DEBUG oslo_concurrency.lockutils [req-e3d62ed4-b43b-4ef9-9963-af276a0f2dd3 req-32400fe3-1fb0-4473-a469-a57527431f65 service nova] Releasing lock "refresh_cache-b84c5391-c337-4e45-823d-5779df22a116" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2908.917282] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690915, 'name': CreateVM_Task, 'duration_secs': 0.295282} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2908.917538] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b84c5391-c337-4e45-823d-5779df22a116] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2908.918459] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2908.918714] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2908.919220] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2908.919561] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f84a8430-7708-4ba0-b52e-c926a91e1aab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2908.924897] env[61663]: DEBUG oslo_vmware.api [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for the task: (returnval){ [ 2908.924897] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c0a9d8-024a-17b9-c02b-e0a029e9e56f" [ 2908.924897] env[61663]: _type = "Task" [ 2908.924897] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2908.935243] env[61663]: DEBUG oslo_vmware.api [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c0a9d8-024a-17b9-c02b-e0a029e9e56f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2909.435447] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2909.435866] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: b84c5391-c337-4e45-823d-5779df22a116] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2909.435985] env[61663]: DEBUG oslo_concurrency.lockutils [None req-0ff85729-fb36-431b-8972-3b403d9d07e7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2922.049417] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Acquiring lock "41bde9a5-f03b-46e6-b868-95762d554114" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2922.049760] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Lock "41bde9a5-f03b-46e6-b868-95762d554114" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2923.691783] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2928.691614] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2934.688712] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2935.691774] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2936.692758] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2936.693121] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Starting heal instance info cache {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2936.693121] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Rebuilding the list of instances to heal {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2936.714562] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.714758] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.714842] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.714967] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.715253] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.715411] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: bcc3a109-50ca-4a22-90f3-609231a3e95f] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.715539] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b98fdfac-2912-403c-a087-46e8eaf40829] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.715663] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 3930490f-586e-4bbd-aad2-1b4995ff6aa3] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.715783] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: 69194463-5c6d-4119-9c19-91b24149bd8b] Skipping network cache update for instance because it is Building. 
{{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.715903] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] [instance: b84c5391-c337-4e45-823d-5779df22a116] Skipping network cache update for instance because it is Building. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2936.716031] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Didn't find any instances for network info cache update. {{(pid=61663) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2936.716544] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2936.716725] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2937.691833] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager.update_available_resource {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2937.703749] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2937.704026] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2937.704148] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2937.704356] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61663) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2937.705471] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ec8fff-49a4-4b33-be97-bcd88a14f9f4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.714365] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6f8ca4-4bd6-46f7-b8f4-0050d9a144d7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.727849] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4953d0a9-f9ae-438d-9a8d-304188dde3d9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.733856] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9aa6ecb-c9d3-43f0-8dac-0e95ae8fbdff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.761439] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181316MB free_disk=183GB free_vcpus=48 pci_devices=None {{(pid=61663) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2937.761580] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2937.761768] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2937.832407] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.832573] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.832704] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.832829] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.832949] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 911c036c-c7d8-4ff7-b874-335361fb5281 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.833081] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance bcc3a109-50ca-4a22-90f3-609231a3e95f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.833202] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b98fdfac-2912-403c-a087-46e8eaf40829 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.833320] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 3930490f-586e-4bbd-aad2-1b4995ff6aa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.833438] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 69194463-5c6d-4119-9c19-91b24149bd8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.833549] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance b84c5391-c337-4e45-823d-5779df22a116 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2937.844049] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Instance 41bde9a5-f03b-46e6-b868-95762d554114 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61663) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2937.844289] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2937.844439] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61663) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2937.969946] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae987d30-dc6f-46cf-91dc-c73a1e0d9ca9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2937.977667] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0e0713-8679-4f7a-a250-ed424cef8259 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2938.006727] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9162773b-5a1c-4c7a-993c-0f7fc2e2196d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2938.013512] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c64356-0489-4176-8abb-35c89571897e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2938.026040] env[61663]: DEBUG nova.compute.provider_tree [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2938.034655] env[61663]: DEBUG nova.scheduler.client.report [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2938.048396] env[61663]: DEBUG nova.compute.resource_tracker [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61663) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2938.048611] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.287s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2943.049980] env[61663]: DEBUG oslo_service.periodic_task [None 
req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2943.050352] env[61663]: DEBUG nova.compute.manager [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61663) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2951.944422] env[61663]: WARNING oslo_vmware.rw_handles [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2951.944422] env[61663]: ERROR oslo_vmware.rw_handles [ 2951.944956] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/e37970ce-d8b3-46d2-8d02-ba0353baf4ff/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2951.947019] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2951.947277] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Copying Virtual Disk [datastore1] vmware_temp/e37970ce-d8b3-46d2-8d02-ba0353baf4ff/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/e37970ce-d8b3-46d2-8d02-ba0353baf4ff/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2951.947574] env[61663]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7bfd26d5-4ce0-4cab-85c2-b606ab239eda {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2951.955357] env[61663]: DEBUG oslo_vmware.api [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for the task: (returnval){ [ 2951.955357] env[61663]: value = "task-1690916" [ 2951.955357] env[61663]: _type = "Task" [ 2951.955357] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2951.963175] env[61663]: DEBUG oslo_vmware.api [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Task: {'id': task-1690916, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2952.467297] env[61663]: DEBUG oslo_vmware.exceptions [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2952.467591] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2952.468196] env[61663]: ERROR nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2952.468196] env[61663]: Faults: ['InvalidArgument'] [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Traceback (most recent call last): [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] yield resources [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] self.driver.spawn(context, instance, image_meta, [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 
2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] self._fetch_image_if_missing(context, vi) [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] image_cache(vi, tmp_image_ds_loc) [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] vm_util.copy_virtual_disk( [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] session._wait_for_task(vmdk_copy_task) [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] return self.wait_for_task(task_ref) [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] return evt.wait() [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] result = hub.switch() [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] return self.greenlet.switch() [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] self.f(*self.args, **self.kw) [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] raise exceptions.translate_fault(task_info.error) [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Faults: ['InvalidArgument'] [ 2952.468196] env[61663]: ERROR nova.compute.manager [instance: 
18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] [ 2952.468965] env[61663]: INFO nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Terminating instance [ 2952.470045] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2952.470269] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2952.470497] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c821dc6c-0d46-4456-9081-be2827f86e90 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2952.472684] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2952.472877] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2952.473578] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103d2c5b-dca6-46a9-ba53-e61c462c9a87 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2952.480359] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2952.481361] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f79657f0-9099-4eb9-9114-19f63780bfd1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2952.482659] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2952.482832] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 
tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2952.483481] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-398ef43c-8aaf-4ca7-a380-5f7b2a7af83c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2952.488098] env[61663]: DEBUG oslo_vmware.api [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for the task: (returnval){ [ 2952.488098] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52474bf8-b112-fa9a-6555-06e3a6b3112e" [ 2952.488098] env[61663]: _type = "Task" [ 2952.488098] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2952.494837] env[61663]: DEBUG oslo_vmware.api [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52474bf8-b112-fa9a-6555-06e3a6b3112e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2952.562167] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2952.562393] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2952.562523] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Deleting the datastore file [datastore1] 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2952.562787] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-016ff826-31f1-4330-b3ff-4feca1df4a83 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2952.568647] env[61663]: DEBUG oslo_vmware.api [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for the task: (returnval){ [ 2952.568647] env[61663]: value = "task-1690918" [ 2952.568647] env[61663]: _type = "Task" [ 2952.568647] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2952.575883] env[61663]: DEBUG oslo_vmware.api [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Task: {'id': task-1690918, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2952.998374] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2952.998727] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Creating directory with path [datastore1] vmware_temp/3891ba52-e8e5-4634-8ef9-b08fedf38012/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2952.998885] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49af1469-c83a-48d6-9a09-df0523023753 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.011270] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Created directory with path [datastore1] vmware_temp/3891ba52-e8e5-4634-8ef9-b08fedf38012/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2953.011468] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Fetch image to [datastore1] vmware_temp/3891ba52-e8e5-4634-8ef9-b08fedf38012/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2953.011642] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/3891ba52-e8e5-4634-8ef9-b08fedf38012/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2953.012386] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416a50b4-eb5b-4a2f-8983-c00c2517f024 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.018801] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3378d4a1-294d-4604-8547-17af1a154d01 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.027287] 
env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4191567-e868-4e7f-a787-0c3136a2e7b7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.056996] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b51c10-a3b7-419f-8f9f-8a7c017e6810 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.062126] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2e5953e5-bb6c-4b80-9a70-85273b5e3212 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.075718] env[61663]: DEBUG oslo_vmware.api [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Task: {'id': task-1690918, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063003} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2953.075943] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2953.076156] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2953.076325] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2953.076515] env[61663]: INFO nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Took 0.60 seconds to destroy the instance on the hypervisor. 
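The DeleteDatastoreFile_Task records above follow the poll-until-terminal pattern that oslo.vmware's wait_for_task/_poll_task loop produces throughout this log: report "progress is N%" while the task runs, then either log completion with its duration_secs or translate the task's fault into an exception (the VimFaultException seen earlier). A minimal stand-alone sketch of that pattern using only the standard library; TaskInfo, fetch_task_info and the 0.5 s interval are illustrative assumptions, not oslo.vmware APIs:

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str                 # 'running' | 'success' | 'error' (assumed states)
    progress: int = 0
    error: str | None = None

def wait_for_task(task_id, fetch_task_info, interval=0.5):
    # Poll a vCenter-style task until it leaves the 'running' state,
    # mirroring the "progress is N%" / "completed successfully" records.
    started = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info.state == 'running':
            print(f"Task {task_id} progress is {info.progress}%.")
            time.sleep(interval)
            continue
        duration_secs = time.monotonic() - started
        if info.state == 'success':
            print(f"Task {task_id} completed successfully "
                  f"(duration_secs={duration_secs:.6f}).")
            return info
        # The real loop raises a translated fault here
        # (exceptions.translate_fault(task_info.error)); a plain
        # RuntimeError stands in for that in this sketch.
        raise RuntimeError(f"Task {task_id} failed: {info.error}")

In the real driver the equivalent of fetch_task_info is roughly a property read of the task's info object via the vim API, and the loop runs inside oslo.vmware's looping-call helper (visible in the tracebacks above) rather than a bare while/sleep.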
[ 2953.078719] env[61663]: DEBUG nova.compute.claims [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2953.078904] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2953.079154] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2953.084051] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2953.226645] env[61663]: DEBUG oslo_vmware.rw_handles [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3891ba52-e8e5-4634-8ef9-b08fedf38012/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2953.289156] env[61663]: DEBUG oslo_vmware.rw_handles [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2953.289356] env[61663]: DEBUG oslo_vmware.rw_handles [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3891ba52-e8e5-4634-8ef9-b08fedf38012/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2953.309255] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10438ece-9415-4544-ba9c-39f54169629f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.316317] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5827999-8b95-4e5b-95ac-673e3dd57f92 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.345146] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb980e6c-3ce6-4991-b2c7-296d25302286 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.351562] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5711c47c-1b96-41aa-a352-0f1fc6636156 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.363842] env[61663]: DEBUG nova.compute.provider_tree [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2953.374256] env[61663]: DEBUG nova.scheduler.client.report [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2953.389169] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.310s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2953.389695] env[61663]: ERROR nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2953.389695] env[61663]: Faults: ['InvalidArgument'] [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Traceback (most recent call last): [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2953.389695] env[61663]: ERROR 
nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] self.driver.spawn(context, instance, image_meta, [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] self._fetch_image_if_missing(context, vi) [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] image_cache(vi, tmp_image_ds_loc) [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] vm_util.copy_virtual_disk( [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] session._wait_for_task(vmdk_copy_task) [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] return self.wait_for_task(task_ref) [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] return evt.wait() [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] result = hub.switch() [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] return self.greenlet.switch() [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] self.f(*self.args, **self.kw) [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] raise exceptions.translate_fault(task_info.error) [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Faults: ['InvalidArgument'] [ 2953.389695] env[61663]: ERROR nova.compute.manager [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] [ 2953.390575] env[61663]: DEBUG nova.compute.utils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2953.391754] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Build of instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 was re-scheduled: A specified parameter was not correct: fileType [ 2953.391754] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2953.392129] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2953.392304] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2953.392476] env[61663]: DEBUG nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2953.392638] env[61663]: DEBUG nova.network.neutron [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2953.773390] env[61663]: DEBUG nova.network.neutron [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2953.783778] env[61663]: INFO nova.compute.manager [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Took 0.39 seconds to deallocate network for instance. [ 2953.877344] env[61663]: INFO nova.scheduler.client.report [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Deleted allocations for instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 [ 2953.898461] env[61663]: DEBUG oslo_concurrency.lockutils [None req-9fd3797a-bfce-4438-8116-ef60714eb8eb tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 670.308s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2953.899565] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 474.062s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2953.899860] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2953.900148] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2953.900389] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2953.902363] env[61663]: INFO nova.compute.manager [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Terminating instance [ 2953.904998] env[61663]: DEBUG nova.compute.manager [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2953.904998] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2953.905340] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28921ed3-088f-4076-8180-e337a322ebca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.915664] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa46eb4e-6c1c-4aa5-a02f-9cef6c128182 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.927016] env[61663]: DEBUG nova.compute.manager [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2953.946907] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 could not be found. [ 2953.947120] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2953.947306] env[61663]: INFO nova.compute.manager [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Took 0.04 seconds to destroy the instance on the hypervisor. 
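The lockutils records in this run always come in "acquired ... waited Ns" / "released ... held Ns" pairs; the build lock on instance 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24 above was held for 670.308s while do_terminate_instance waited 474.062s to acquire it. A minimal sketch of that instrumentation pattern with the standard library; timed_lock and its print-based logging are illustrative stand-ins, not the oslo.concurrency implementation:

import threading
import time
from contextlib import contextmanager

_locks = {}
_locks_guard = threading.Lock()  # protects creation of named locks

@contextmanager
def timed_lock(name, holder):
    # Report how long the caller waited to acquire the named lock and
    # how long it was held, in the style of the lockutils records above.
    with _locks_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    print(f'Lock "{name}" acquired by "{holder}" :: '
          f'waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{holder}" :: '
              f'held {time.monotonic() - t1:.3f}s')

Usage would look like: with timed_lock("compute_resources", "ResourceTracker.instance_claim"): ... — serializing claim, claim abort and the periodic _update_available_resource behind one named lock is what produces the waited/held figures scattered through this log.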
[ 2953.947561] env[61663]: DEBUG oslo.service.loopingcall [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2953.947809] env[61663]: DEBUG nova.compute.manager [-] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2953.947906] env[61663]: DEBUG nova.network.neutron [-] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2953.972521] env[61663]: DEBUG nova.network.neutron [-] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2953.975378] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2953.975605] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2953.977287] env[61663]: INFO nova.compute.claims [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2953.980586] env[61663]: INFO nova.compute.manager [-] [instance: 18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24] Took 0.03 seconds to deallocate network for instance. 
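The inventory reported for provider b47d006d-a9bd-461e-a5d9-39811f005278 never changes across these periodic runs, and the claim above fits comfortably because placement sizes each resource class as roughly (total - reserved) * allocation_ratio. A quick consistency check of the figures in this log (plain illustration, not Nova code):

# capacity = (total - reserved) * allocation_ratio per resource class.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity = {capacity:g}")

# The final resource view reported earlier (used_ram=1792MB with ten
# 128 MB m1.nano instances) is consistent with
# 10 * 128 MB + 512 MB reserved = 1792 MB.
assert 10 * 128 + 512 == 1792

With allocation_ratio=4.0 the 48 physical VCPUs present as 192 schedulable ones, which is why ten 1-vCPU instances plus this new claim leave the provider far from exhausted.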
[ 2954.061605] env[61663]: DEBUG oslo_concurrency.lockutils [None req-c61b9f4f-4bfd-4ea1-a984-19ffdd342b9a tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "18a6a6ff-2f6e-4f15-a4d8-c03d045c7f24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.162s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2954.157357] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14261db1-60eb-4ffc-bfb7-d660aa17a17b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.164877] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13337b35-171b-439d-bae9-a1810e5ee078 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.194328] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdb39a7-8e9a-45ef-9b61-a7307db1cf7e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.201909] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1769ad-d54e-409d-a829-537b372cd36d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.216354] env[61663]: DEBUG nova.compute.provider_tree [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2954.224949] env[61663]: DEBUG nova.scheduler.client.report [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2954.238023] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.262s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2954.238470] env[61663]: DEBUG nova.compute.manager [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Start building networks asynchronously for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2954.270639] env[61663]: DEBUG nova.compute.utils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2954.272089] env[61663]: DEBUG nova.compute.manager [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Not allocating networking since 'none' was specified. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 2954.280458] env[61663]: DEBUG nova.compute.manager [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Start building block device mappings for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2954.349635] env[61663]: DEBUG nova.compute.manager [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2954.375588] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2954.375838] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2954.375999] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2954.376205] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2954.376357] env[61663]: DEBUG 
nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2954.376508] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2954.376749] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2954.376918] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2954.377137] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2954.377319] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2954.377499] env[61663]: DEBUG nova.virt.hardware [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2954.378403] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6cc7648-f4d4-48eb-9522-bbba134edee0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.386300] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76344ef3-4316-4792-be51-d61e02ebf879 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.399202] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Instance VIF info [] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2954.404530] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Creating folder: Project 
(6af7169205864bd0b5d26d69d5b615ce). Parent ref: group-v352575. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2954.404775] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be71c6b1-0333-4c7f-95dd-5a97a8c25601 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.413592] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Created folder: Project (6af7169205864bd0b5d26d69d5b615ce) in parent group-v352575. [ 2954.413776] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Creating folder: Instances. Parent ref: group-v352700. {{(pid=61663) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2954.413973] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f8480322-32dc-4958-9a24-b0de3509e390 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.422588] env[61663]: INFO nova.virt.vmwareapi.vm_util [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Created folder: Instances in parent group-v352700. [ 2954.422806] env[61663]: DEBUG oslo.service.loopingcall [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2954.422976] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2954.423186] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b3934b3-2400-4b0a-86d9-6d40dea1554f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2954.437912] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2954.437912] env[61663]: value = "task-1690921" [ 2954.437912] env[61663]: _type = "Task" [ 2954.437912] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2954.444816] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690921, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2954.947687] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690921, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2955.448715] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690921, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2955.948902] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690921, 'name': CreateVM_Task} progress is 99%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2956.449774] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690921, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2956.950641] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690921, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2957.452830] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690921, 'name': CreateVM_Task} progress is 99%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2957.954628] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690921, 'name': CreateVM_Task, 'duration_secs': 3.372559} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2957.954799] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2957.955243] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2957.955408] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2957.955733] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2957.956055] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bebd26e-ed13-4347-a524-f1976e0cc7b8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2957.960503] env[61663]: DEBUG oslo_vmware.api [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Waiting for the task: (returnval){ [ 2957.960503] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f5c7aa-9766-c30e-75e2-5ca18dee0b7c" [ 2957.960503] env[61663]: _type = "Task" [ 2957.960503] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2957.969627] env[61663]: DEBUG oslo_vmware.api [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f5c7aa-9766-c30e-75e2-5ca18dee0b7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2958.469936] env[61663]: DEBUG oslo_vmware.api [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f5c7aa-9766-c30e-75e2-5ca18dee0b7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2958.971643] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2958.971898] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] [instance: 41bde9a5-f03b-46e6-b868-95762d554114] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2958.972131] env[61663]: DEBUG oslo_concurrency.lockutils [None req-5f1ca763-5374-4dbf-9c03-a6f5b8eef92b tempest-ServersAaction247Test-1363678546 tempest-ServersAaction247Test-1363678546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2959.692680] env[61663]: DEBUG oslo_service.periodic_task [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Running periodic task ComputeManager._run_image_cache_manager_pass {{(pid=61663) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2959.693102] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.register_storage_use..do_register_storage_use" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2959.693549] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.register_storage_use..do_register_storage_use" :: waited 0.001s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2959.693884] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.register_storage_use..do_register_storage_use" :: held 0.000s {{(pid=61663) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2959.694055] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2959.694348] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2959.694618] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2959.739976] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215d8623-5f12-4849-8cc0-8fa641434ad3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2959.748569] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a583bb-ba0c-4836-99ac-553c737c3933 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2959.777019] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-031a3687-1061-422c-8c11-10510abc56e0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2959.782105] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2959.782105] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52026277-388d-9af4-9355-01112693c676" [ 2959.782105] env[61663]: _type = "Task" [ 2959.782105] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2959.790362] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52026277-388d-9af4-9355-01112693c676, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2960.292866] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52026277-388d-9af4-9355-01112693c676, 'name': SearchDatastore_Task, 'duration_secs': 0.022523} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2960.293530] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore1] devstack-image-cache_base/7305d72d-6c93-42c2-9dc8-56f7dcdb1973" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2960.293679] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore1] devstack-image-cache_base/7305d72d-6c93-42c2-9dc8-56f7dcdb1973" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2960.294259] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/7305d72d-6c93-42c2-9dc8-56f7dcdb1973" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2960.294346] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e756fb09-7258-476b-b869-64a8bbda9935 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2960.298667] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2960.298667] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526670bd-3835-511c-0dee-61315ce9c9ce" [ 2960.298667] env[61663]: _type = "Task" [ 2960.298667] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2960.305867] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526670bd-3835-511c-0dee-61315ce9c9ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2960.810074] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526670bd-3835-511c-0dee-61315ce9c9ce, 'name': SearchDatastore_Task, 'duration_secs': 0.010365} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2960.810966] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore1] devstack-image-cache_base/7305d72d-6c93-42c2-9dc8-56f7dcdb1973 is no longer used. Deleting! 
[ 2960.811137] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore1] devstack-image-cache_base/7305d72d-6c93-42c2-9dc8-56f7dcdb1973 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2960.811410] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e91d0b6-06fd-48f6-be3b-ed268e7d56df {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2960.816895] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2960.816895] env[61663]: value = "task-1690922" [ 2960.816895] env[61663]: _type = "Task" [ 2960.816895] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2960.824182] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2961.327154] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119339} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2961.327418] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2961.327537] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore1] devstack-image-cache_base/7305d72d-6c93-42c2-9dc8-56f7dcdb1973" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2961.327767] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore1] devstack-image-cache_base/0aeaaf86-adec-4f48-a5cd-6a8d3f9bfff1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2961.327883] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore1] devstack-image-cache_base/0aeaaf86-adec-4f48-a5cd-6a8d3f9bfff1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2961.328228] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/0aeaaf86-adec-4f48-a5cd-6a8d3f9bfff1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2961.328496] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-862ea24f-7840-4804-9eed-3ca5e3bd7dd1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.332654] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2961.332654] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5201d4fa-6b67-9f9c-1183-9fb4f65ebe00" [ 2961.332654] env[61663]: _type = "Task" [ 2961.332654] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2961.339948] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5201d4fa-6b67-9f9c-1183-9fb4f65ebe00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2961.844539] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5201d4fa-6b67-9f9c-1183-9fb4f65ebe00, 'name': SearchDatastore_Task, 'duration_secs': 0.009378} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2961.844885] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore1] devstack-image-cache_base/0aeaaf86-adec-4f48-a5cd-6a8d3f9bfff1 is no longer used. Deleting! [ 2961.845027] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore1] devstack-image-cache_base/0aeaaf86-adec-4f48-a5cd-6a8d3f9bfff1 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2961.845200] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a93780b0-faa6-45aa-9c51-f3aea0a71376 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2961.851732] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2961.851732] env[61663]: value = "task-1690923" [ 2961.851732] env[61663]: _type = "Task" [ 2961.851732] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2961.860547] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690923, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2962.362300] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690923, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107975} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2962.362528] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2962.362685] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore1] devstack-image-cache_base/0aeaaf86-adec-4f48-a5cd-6a8d3f9bfff1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2962.362908] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore1] devstack-image-cache_base/9e0a47cf-5fba-4ad5-be00-211d9388d9ab" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2962.363043] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore1] devstack-image-cache_base/9e0a47cf-5fba-4ad5-be00-211d9388d9ab" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2962.363371] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/9e0a47cf-5fba-4ad5-be00-211d9388d9ab" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2962.363637] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a7f7de1-92f0-4788-9e09-df82ebe0525d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2962.368017] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2962.368017] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525ce280-c5ed-4e6b-b136-eb0dae2ab7e8" [ 2962.368017] env[61663]: _type = "Task" [ 2962.368017] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2962.375421] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525ce280-c5ed-4e6b-b136-eb0dae2ab7e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2962.836941] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a3c58f93-175c-45c0-8de2-5906042a1cd2 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "3930490f-586e-4bbd-aad2-1b4995ff6aa3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2962.879086] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525ce280-c5ed-4e6b-b136-eb0dae2ab7e8, 'name': SearchDatastore_Task, 'duration_secs': 0.009579} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2962.879406] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore1] devstack-image-cache_base/9e0a47cf-5fba-4ad5-be00-211d9388d9ab is no longer used. Deleting! [ 2962.879513] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore1] devstack-image-cache_base/9e0a47cf-5fba-4ad5-be00-211d9388d9ab {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2962.879718] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6bb82b3d-151d-4512-a34d-788e7e1d32e2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2962.885380] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2962.885380] env[61663]: value = "task-1690924" [ 2962.885380] env[61663]: _type = "Task" [ 2962.885380] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2962.893072] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690924, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2963.396106] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139339} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2963.396337] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2963.396544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore1] devstack-image-cache_base/9e0a47cf-5fba-4ad5-be00-211d9388d9ab" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2963.396688] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore1] devstack-image-cache_base/a484b73b-04db-40ac-9db0-945e8e6344ae" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2963.396832] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore1] devstack-image-cache_base/a484b73b-04db-40ac-9db0-945e8e6344ae" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2963.397222] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a484b73b-04db-40ac-9db0-945e8e6344ae" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2963.397506] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5b49c36-a416-4205-9ff9-2b066f7f0db9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2963.402313] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2963.402313] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52682203-c45e-7ae5-287e-928147930d3f" [ 2963.402313] env[61663]: _type = "Task" [ 2963.402313] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2963.410850] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52682203-c45e-7ae5-287e-928147930d3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2963.915538] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52682203-c45e-7ae5-287e-928147930d3f, 'name': SearchDatastore_Task, 'duration_secs': 0.010355} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2963.915992] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore1] devstack-image-cache_base/a484b73b-04db-40ac-9db0-945e8e6344ae is no longer used. Deleting! 
[ 2963.916280] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore1] devstack-image-cache_base/a484b73b-04db-40ac-9db0-945e8e6344ae {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2963.916662] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76202f7e-10e3-4498-8373-1cca051b3790 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2963.924849] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2963.924849] env[61663]: value = "task-1690925" [ 2963.924849] env[61663]: _type = "Task" [ 2963.924849] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2963.936204] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690925, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2964.435259] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690925, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155804} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2964.435506] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2964.435729] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore1] devstack-image-cache_base/a484b73b-04db-40ac-9db0-945e8e6344ae" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2964.435867] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2964.435954] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2964.436316] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2964.436588] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4889d6e-0534-44fe-ab49-0f10359439f4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.441034] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2964.441034] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5299a9eb-b1c8-688f-7897-46b21b0eb64e" [ 2964.441034] env[61663]: _type = "Task" [ 2964.441034] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2964.448233] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5299a9eb-b1c8-688f-7897-46b21b0eb64e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2964.951644] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5299a9eb-b1c8-688f-7897-46b21b0eb64e, 'name': SearchDatastore_Task, 'duration_secs': 0.027904} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2964.951960] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89 is no longer used. Deleting! [ 2964.952128] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2964.952394] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-98d23b7e-67ca-4be1-81da-db15aa2143ef {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2964.958918] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2964.958918] env[61663]: value = "task-1690926" [ 2964.958918] env[61663]: _type = "Task" [ 2964.958918] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2964.971826] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690926, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2965.468278] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135611} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2965.468498] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2965.468681] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore1] devstack-image-cache_base/5c4dd705-3ebd-49c2-9c06-9370b69c5d89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2965.468906] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore1] devstack-image-cache_base/bf8b04b1-9327-4959-b95d-ee019965b5c3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2965.469039] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore1] devstack-image-cache_base/bf8b04b1-9327-4959-b95d-ee019965b5c3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2965.469391] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/bf8b04b1-9327-4959-b95d-ee019965b5c3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2965.469648] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5766ecc-a1c9-4923-924d-64161900cbff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.473672] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2965.473672] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52594c0e-4d49-54fa-9dbb-b701365c3af3" [ 2965.473672] env[61663]: _type = "Task" [ 2965.473672] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2965.482238] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52594c0e-4d49-54fa-9dbb-b701365c3af3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2965.983847] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52594c0e-4d49-54fa-9dbb-b701365c3af3, 'name': SearchDatastore_Task, 'duration_secs': 0.011083} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2965.984231] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore1] devstack-image-cache_base/bf8b04b1-9327-4959-b95d-ee019965b5c3 is no longer used. Deleting! 
[ 2965.984374] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore1] devstack-image-cache_base/bf8b04b1-9327-4959-b95d-ee019965b5c3 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2965.984593] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22c6c577-f7e6-4225-96d0-28673fa899a8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2965.991065] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2965.991065] env[61663]: value = "task-1690927" [ 2965.991065] env[61663]: _type = "Task" [ 2965.991065] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2965.998503] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690927, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2966.501872] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690927, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106696} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2966.501872] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2966.501872] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore1] devstack-image-cache_base/bf8b04b1-9327-4959-b95d-ee019965b5c3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2966.501872] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2966.501872] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2966.502213] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2966.502401] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20f2069a-e592-49d8-888e-f7894bccb858 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.506675] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2966.506675] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52095305-4552-558c-b427-9e335cd24443" [ 2966.506675] env[61663]: _type = "Task" [ 2966.506675] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2966.514050] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52095305-4552-558c-b427-9e335cd24443, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2967.017032] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2967.017294] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore1] devstack-image-cache_base/" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2967.017419] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore1] devstack-image-cache_base/" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2967.017732] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2967.018022] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd5faa14-d573-48a3-a032-15e3d837944f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.022187] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2967.022187] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c61ca5-9e4c-b51e-b532-7534598aab14" [ 2967.022187] env[61663]: _type = "Task" [ 2967.022187] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.029244] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c61ca5-9e4c-b51e-b532-7534598aab14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2967.532632] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c61ca5-9e4c-b51e-b532-7534598aab14, 'name': SearchDatastore_Task, 'duration_secs': 0.023808} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2967.532879] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore1] devstack-image-cache_base/" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2967.534859] env[61663]: DEBUG oslo_vmware.service [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47959621-e886-4284-aa16-5b8fc18fd8af {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.542324] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f8e1801-d3d8-4e2b-a8d8-192c9357c15b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.547166] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2967.547166] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f25da-3064-ef87-f858-78e3133f1214" [ 2967.547166] env[61663]: _type = "Task" [ 2967.547166] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.556821] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f25da-3064-ef87-f858-78e3133f1214, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2968.142315] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f25da-3064-ef87-f858-78e3133f1214, 'name': SearchDatastore_Task, 'duration_secs': 0.293134} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2968.142837] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/37f1b3af-0964-40f6-a6a8-5ab96da09cf8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2968.142987] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/37f1b3af-0964-40f6-a6a8-5ab96da09cf8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2968.143348] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/37f1b3af-0964-40f6-a6a8-5ab96da09cf8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2968.143798] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-351a3313-68db-47b9-9015-162e6e8e6374 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.148931] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2968.148931] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520d0839-7d44-dfab-268e-30554a919c28" [ 2968.148931] env[61663]: _type = "Task" [ 2968.148931] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2968.156403] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520d0839-7d44-dfab-268e-30554a919c28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2968.661960] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520d0839-7d44-dfab-268e-30554a919c28, 'name': SearchDatastore_Task, 'duration_secs': 0.00865} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2968.661960] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/37f1b3af-0964-40f6-a6a8-5ab96da09cf8/ts-2024-12-01-04-38-14 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2968.661960] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7d68946-002f-4bf1-a025-e965980810e1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.672678] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/37f1b3af-0964-40f6-a6a8-5ab96da09cf8/ts-2024-12-01-04-38-14 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2968.672821] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 37f1b3af-0964-40f6-a6a8-5ab96da09cf8 is no longer used by this node. Pending deletion! [ 2968.672983] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/37f1b3af-0964-40f6-a6a8-5ab96da09cf8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2968.673219] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3bcfdf96-0158-49ae-93e8-a47c7c97d2f6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2968.673342] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3bcfdf96-0158-49ae-93e8-a47c7c97d2f6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2968.673721] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3bcfdf96-0158-49ae-93e8-a47c7c97d2f6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2968.673970] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51f0249b-1e9a-4090-8218-0375ca791dd9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.678173] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2968.678173] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522d31e5-2659-6c47-a7b4-83da832abc6a" [ 2968.678173] env[61663]: _type = "Task" [ 2968.678173] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2968.685340] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522d31e5-2659-6c47-a7b4-83da832abc6a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2969.188533] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522d31e5-2659-6c47-a7b4-83da832abc6a, 'name': SearchDatastore_Task, 'duration_secs': 0.007421} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2969.188859] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3bcfdf96-0158-49ae-93e8-a47c7c97d2f6/ts-2024-12-01-04-38-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2969.189051] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64bac217-e9c6-4279-9e6b-e90354a9a3f4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2969.200800] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3bcfdf96-0158-49ae-93e8-a47c7c97d2f6/ts-2024-12-01-04-38-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2969.200953] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3bcfdf96-0158-49ae-93e8-a47c7c97d2f6 is no longer used by this node. Pending deletion! [ 2969.201172] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3bcfdf96-0158-49ae-93e8-a47c7c97d2f6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2969.201389] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7372aa0a-7a5d-4679-b372-92593d284080" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2969.201509] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7372aa0a-7a5d-4679-b372-92593d284080" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2969.201811] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7372aa0a-7a5d-4679-b372-92593d284080" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2969.202048] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51fd1266-23cf-4ae7-8795-c1cf8fcf3e4b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2969.206030] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2969.206030] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a5755b-16f7-8940-d464-1b8a0b08fdc7" [ 2969.206030] env[61663]: _type = "Task" [ 2969.206030] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2969.213033] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a5755b-16f7-8940-d464-1b8a0b08fdc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2969.716915] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a5755b-16f7-8940-d464-1b8a0b08fdc7, 'name': SearchDatastore_Task, 'duration_secs': 0.007959} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2969.717256] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/7372aa0a-7a5d-4679-b372-92593d284080/ts-2024-12-01-04-38-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2969.717524] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6d4313e-aa56-4ae7-a111-a5dab217cb59 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2969.728939] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/7372aa0a-7a5d-4679-b372-92593d284080/ts-2024-12-01-04-38-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2969.729144] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 7372aa0a-7a5d-4679-b372-92593d284080 is no longer used by this node. Pending deletion! 
[ 2969.729330] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7372aa0a-7a5d-4679-b372-92593d284080" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2969.729551] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6898bc5a-389e-43e5-b898-9ca48705357e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2969.729672] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6898bc5a-389e-43e5-b898-9ca48705357e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2969.729977] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6898bc5a-389e-43e5-b898-9ca48705357e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2969.730217] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9257724c-01a5-4604-8226-681344dc5619 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2969.734219] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2969.734219] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5263b86a-0cd9-1ac9-0e35-48ee5429d7a3" [ 2969.734219] env[61663]: _type = "Task" [ 2969.734219] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2969.741253] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5263b86a-0cd9-1ac9-0e35-48ee5429d7a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2970.244973] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5263b86a-0cd9-1ac9-0e35-48ee5429d7a3, 'name': SearchDatastore_Task, 'duration_secs': 0.007865} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2970.245351] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/6898bc5a-389e-43e5-b898-9ca48705357e/ts-2024-12-01-04-38-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2970.245516] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bde2e5fe-4b0e-4cab-8688-0b8f52e006c7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2970.256593] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/6898bc5a-389e-43e5-b898-9ca48705357e/ts-2024-12-01-04-38-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2970.256766] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 6898bc5a-389e-43e5-b898-9ca48705357e is no longer used by this node. Pending deletion! [ 2970.256908] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6898bc5a-389e-43e5-b898-9ca48705357e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2970.257140] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e38b6c22-637f-4d15-afc3-e244ec341338" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2970.257263] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e38b6c22-637f-4d15-afc3-e244ec341338" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2970.257575] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e38b6c22-637f-4d15-afc3-e244ec341338" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2970.257796] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c34beb7a-c507-42dd-bf76-c65e2ae864a8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2970.261603] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2970.261603] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527ed6d7-2c59-e81c-b4c1-5563ae21d31a" [ 2970.261603] env[61663]: _type = "Task" [ 2970.261603] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2970.268464] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527ed6d7-2c59-e81c-b4c1-5563ae21d31a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2970.772651] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527ed6d7-2c59-e81c-b4c1-5563ae21d31a, 'name': SearchDatastore_Task, 'duration_secs': 0.007412} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2970.772926] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e38b6c22-637f-4d15-afc3-e244ec341338/ts-2024-12-01-04-38-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2970.773199] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86b2f8b5-478c-4017-9d12-397af26cd0bb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2970.784395] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e38b6c22-637f-4d15-afc3-e244ec341338/ts-2024-12-01-04-38-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2970.784544] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e38b6c22-637f-4d15-afc3-e244ec341338 is no longer used by this node. Pending deletion! [ 2970.784697] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e38b6c22-637f-4d15-afc3-e244ec341338" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2970.784906] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/adc15ad2-b295-4efe-9018-af13770c3b9c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2970.785039] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/adc15ad2-b295-4efe-9018-af13770c3b9c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2970.785358] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/adc15ad2-b295-4efe-9018-af13770c3b9c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2970.785580] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c46b2446-e0a2-4cd2-a68b-821f0e9d9500 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2970.789461] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2970.789461] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e44a04-884d-4a56-0dd1-f552b0e30f0f" [ 2970.789461] env[61663]: _type = "Task" [ 2970.789461] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2970.796798] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e44a04-884d-4a56-0dd1-f552b0e30f0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2971.300060] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e44a04-884d-4a56-0dd1-f552b0e30f0f, 'name': SearchDatastore_Task, 'duration_secs': 0.008146} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2971.300358] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/adc15ad2-b295-4efe-9018-af13770c3b9c/ts-2024-12-01-04-38-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2971.300588] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d876e7f6-2b63-4527-9c82-1fbab81096f1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2971.311856] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/adc15ad2-b295-4efe-9018-af13770c3b9c/ts-2024-12-01-04-38-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2971.311991] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image adc15ad2-b295-4efe-9018-af13770c3b9c is no longer used by this node. Pending deletion! 
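Editor's note: each SearchDatastore_Task above is polled ("progress is 0%") until it reports completion. A minimal polling sketch, assuming a hypothetical get_task_info() accessor returning a dict; oslo.vmware's real wait_for_task drives this with a looping call rather than a bare loop:

import time

def wait_for_task(get_task_info, interval=0.5):
    while True:
        info = get_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # While pending, the caller logs records like "progress is 0%".
        time.sleep(interval)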
[ 2971.312171] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/adc15ad2-b295-4efe-9018-af13770c3b9c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2971.312384] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d3e53c18-e553-4958-b217-1320392ac3a2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2971.312503] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d3e53c18-e553-4958-b217-1320392ac3a2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2971.312815] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d3e53c18-e553-4958-b217-1320392ac3a2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2971.313058] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15a37b16-53c1-430d-be22-ed7671e975c1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2971.316902] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2971.316902] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52638c38-ab11-a400-78db-fbcc8f0d906a" [ 2971.316902] env[61663]: _type = "Task" [ 2971.316902] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2971.323751] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52638c38-ab11-a400-78db-fbcc8f0d906a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2971.828090] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52638c38-ab11-a400-78db-fbcc8f0d906a, 'name': SearchDatastore_Task, 'duration_secs': 0.007908} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2971.828385] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d3e53c18-e553-4958-b217-1320392ac3a2/ts-2024-12-01-04-38-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2971.828658] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-441b08c2-f71c-43db-b8a8-75e503cb56b5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2971.841886] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d3e53c18-e553-4958-b217-1320392ac3a2/ts-2024-12-01-04-38-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2971.842111] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d3e53c18-e553-4958-b217-1320392ac3a2 is no longer used by this node. Pending deletion! [ 2971.842359] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d3e53c18-e553-4958-b217-1320392ac3a2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2971.842662] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2971.842841] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2971.843317] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2971.843669] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9879dc18-4e22-46b0-8151-a78bebcdccd8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2971.848293] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2971.848293] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5290deb0-dd4f-fc8a-31c4-18da2835d3a3" [ 2971.848293] env[61663]: _type = "Task" [ 2971.848293] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2971.855698] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5290deb0-dd4f-fc8a-31c4-18da2835d3a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2972.358757] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5290deb0-dd4f-fc8a-31c4-18da2835d3a3, 'name': SearchDatastore_Task, 'duration_secs': 0.009312} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2972.359079] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e/ts-2024-12-01-04-38-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2972.359267] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9919a1b-9424-4867-9569-1918ced7958e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2972.370325] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e/ts-2024-12-01-04-38-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2972.370469] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e85853cc-33ff-4df6-aa9d-eb02e938717e is no longer used by this node. Pending deletion! [ 2972.370614] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2972.370823] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/5fcb45d9-f822-49a0-a952-e654cbca1272" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2972.370942] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/5fcb45d9-f822-49a0-a952-e654cbca1272" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2972.371258] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5fcb45d9-f822-49a0-a952-e654cbca1272" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2972.371480] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b440d62-6f31-48bc-b9ec-4601f297b1bf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2972.375526] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2972.375526] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527d6bc5-cae7-2378-dca0-d065811e3424" [ 2972.375526] env[61663]: _type = "Task" [ 2972.375526] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2972.382240] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527d6bc5-cae7-2378-dca0-d065811e3424, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2972.886457] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527d6bc5-cae7-2378-dca0-d065811e3424, 'name': SearchDatastore_Task, 'duration_secs': 0.007445} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2972.886740] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/5fcb45d9-f822-49a0-a952-e654cbca1272/ts-2024-12-01-04-38-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2972.887034] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1e45b73-c75c-4275-be3f-05202a9ba6e6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2972.899058] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/5fcb45d9-f822-49a0-a952-e654cbca1272/ts-2024-12-01-04-38-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2972.899058] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 5fcb45d9-f822-49a0-a952-e654cbca1272 is no longer used by this node. Pending deletion! 
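Editor's note: the Acquiring/Acquired/Releasing records above come from oslo.concurrency. A minimal sketch of guarding one cache entry the same way, using lockutils.lock with external=True (which also takes the file-based semaphore seen in the log); the lock name mirrors the datastore path, and do_check is a hypothetical callback:

from oslo_concurrency import lockutils

def check_cache_entry(image_id, do_check):
    name = "[datastore2] devstack-image-cache_base/%s" % image_id
    # external=True serializes across processes, not just across threads.
    with lockutils.lock(name, external=True):
        do_check(image_id)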
[ 2972.899058] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/5fcb45d9-f822-49a0-a952-e654cbca1272" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2972.899248] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/f3bdee02-6075-46e2-8187-0ca1a297b0bf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2972.899285] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/f3bdee02-6075-46e2-8187-0ca1a297b0bf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2972.899568] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f3bdee02-6075-46e2-8187-0ca1a297b0bf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2972.899808] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9587617-1092-4d73-aff9-0204d08d5975 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2972.903764] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2972.903764] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5276af88-f99d-3b8b-4a52-d0ecef9cefc5" [ 2972.903764] env[61663]: _type = "Task" [ 2972.903764] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2972.910850] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5276af88-f99d-3b8b-4a52-d0ecef9cefc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2973.414431] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5276af88-f99d-3b8b-4a52-d0ecef9cefc5, 'name': SearchDatastore_Task, 'duration_secs': 0.006982} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2973.414694] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/f3bdee02-6075-46e2-8187-0ca1a297b0bf/ts-2024-12-01-04-38-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2973.414911] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-321e1bf6-9e31-4a96-9a81-62a7775e3f86 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2973.425626] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/f3bdee02-6075-46e2-8187-0ca1a297b0bf/ts-2024-12-01-04-38-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2973.425770] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image f3bdee02-6075-46e2-8187-0ca1a297b0bf is no longer used by this node. Pending deletion! [ 2973.425931] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/f3bdee02-6075-46e2-8187-0ca1a297b0bf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2973.426154] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/267e0f70-1a50-489a-ab47-6ae3fe426d80" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2973.426275] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/267e0f70-1a50-489a-ab47-6ae3fe426d80" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2973.426576] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/267e0f70-1a50-489a-ab47-6ae3fe426d80" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2973.426800] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b3ebdbe-02e7-43a9-86dd-775c619699e7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2973.430634] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2973.430634] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ba40f9-9124-9247-fdab-eda1488d2014" [ 2973.430634] env[61663]: _type = "Task" [ 2973.430634] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2973.437452] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ba40f9-9124-9247-fdab-eda1488d2014, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2973.940567] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ba40f9-9124-9247-fdab-eda1488d2014, 'name': SearchDatastore_Task, 'duration_secs': 0.006909} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2973.940934] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/267e0f70-1a50-489a-ab47-6ae3fe426d80/ts-2024-12-01-04-38-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2973.941101] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a6283b8-71b4-4dda-b071-63e02f648ee0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2973.952415] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/267e0f70-1a50-489a-ab47-6ae3fe426d80/ts-2024-12-01-04-38-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2973.952559] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 267e0f70-1a50-489a-ab47-6ae3fe426d80 is no longer used by this node. Pending deletion! [ 2973.952724] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/267e0f70-1a50-489a-ab47-6ae3fe426d80" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2973.952940] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e7e333d1-0265-479a-b844-06a3f0e0e6b0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2973.953083] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e7e333d1-0265-479a-b844-06a3f0e0e6b0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2973.953395] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e7e333d1-0265-479a-b844-06a3f0e0e6b0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2973.953626] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c06073fe-c9c3-45c4-af79-5aa37c9e7ed8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2973.957634] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2973.957634] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5231928f-76a5-5b92-c472-d4d594827690" [ 2973.957634] env[61663]: _type = "Task" [ 2973.957634] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2973.965202] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5231928f-76a5-5b92-c472-d4d594827690, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2974.468078] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5231928f-76a5-5b92-c472-d4d594827690, 'name': SearchDatastore_Task, 'duration_secs': 0.007345} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2974.468361] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e7e333d1-0265-479a-b844-06a3f0e0e6b0/ts-2024-12-01-04-38-20 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2974.468621] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6148cd3-3bc9-49f8-ad28-62b6e7191822 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2974.479811] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e7e333d1-0265-479a-b844-06a3f0e0e6b0/ts-2024-12-01-04-38-20 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2974.479953] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e7e333d1-0265-479a-b844-06a3f0e0e6b0 is no longer used by this node. Pending deletion! 
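Editor's note: a small helper for reading this section of the log, listing which cached images the aging pass marked "Pending deletion!"; the pattern is taken directly from the INFO records above:

import re

PENDING = re.compile(r"Image ([0-9a-f-]{36}) is no longer used by this node")

def pending_deletions(lines):
    # Returns the image UUIDs flagged for eventual removal.
    return [m.group(1) for line in lines for m in PENDING.finditer(line)]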
[ 2974.480098] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e7e333d1-0265-479a-b844-06a3f0e0e6b0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2974.480317] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/5fcbaa3e-d808-4ffa-982b-1e084a55d8dd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2974.480438] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/5fcbaa3e-d808-4ffa-982b-1e084a55d8dd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2974.480736] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5fcbaa3e-d808-4ffa-982b-1e084a55d8dd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2974.480967] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37214a10-1646-40bd-9133-4d175a1da33d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2974.485030] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2974.485030] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5226fbab-c440-3ce1-b9f3-b71afb9114f4" [ 2974.485030] env[61663]: _type = "Task" [ 2974.485030] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2974.491883] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5226fbab-c440-3ce1-b9f3-b71afb9114f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2974.996745] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5226fbab-c440-3ce1-b9f3-b71afb9114f4, 'name': SearchDatastore_Task, 'duration_secs': 0.006902} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2974.997060] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/5fcbaa3e-d808-4ffa-982b-1e084a55d8dd/ts-2024-12-01-04-38-20 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2974.997346] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58869782-7906-47fd-b26a-0d099abff88b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.008763] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/5fcbaa3e-d808-4ffa-982b-1e084a55d8dd/ts-2024-12-01-04-38-20 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2975.008867] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 5fcbaa3e-d808-4ffa-982b-1e084a55d8dd is no longer used by this node. Pending deletion! [ 2975.009027] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/5fcbaa3e-d808-4ffa-982b-1e084a55d8dd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2975.009253] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2975.009369] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2975.009682] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2975.009936] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73cc03ce-dcb6-4182-ad10-c619b7a8ab3d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.014496] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2975.014496] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520f643a-130e-a899-803a-7d1c6484cb97" [ 2975.014496] env[61663]: _type = "Task" [ 2975.014496] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2975.022012] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520f643a-130e-a899-803a-7d1c6484cb97, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2975.524999] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520f643a-130e-a899-803a-7d1c6484cb97, 'name': SearchDatastore_Task, 'duration_secs': 0.00724} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2975.525271] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a/ts-2024-12-01-04-38-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2975.525492] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62045fdc-ae23-43db-bef1-4ed3d46c8b44 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.536587] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a/ts-2024-12-01-04-38-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2975.536729] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a is no longer used by this node. Pending deletion! [ 2975.536908] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2975.537147] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e6767764-d589-41f9-abd0-806c3f46c0a0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2975.537273] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e6767764-d589-41f9-abd0-806c3f46c0a0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2975.537584] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e6767764-d589-41f9-abd0-806c3f46c0a0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2975.537826] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-518d44dd-5bae-432b-a3df-9e2485fa8d6a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.541745] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2975.541745] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5247b298-5f23-2f46-cbc2-e7f27c974286" [ 2975.541745] env[61663]: _type = "Task" [ 2975.541745] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2975.548717] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5247b298-5f23-2f46-cbc2-e7f27c974286, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2976.052389] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5247b298-5f23-2f46-cbc2-e7f27c974286, 'name': SearchDatastore_Task, 'duration_secs': 0.013863} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2976.052656] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e6767764-d589-41f9-abd0-806c3f46c0a0/ts-2024-12-01-04-38-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2976.052904] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dcce5b1e-9f00-4d7b-93be-ae1f193db16c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.063634] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e6767764-d589-41f9-abd0-806c3f46c0a0/ts-2024-12-01-04-38-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2976.063771] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e6767764-d589-41f9-abd0-806c3f46c0a0 is no longer used by this node. Pending deletion! 
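Editor's note: the ts-YYYY-MM-DD-HH-MM-SS folder names above encode the marking time. A sketch of deciding expiry from such a name, assuming that same format and a configurable unused-image window (the threshold value here is illustrative, not Nova's default):

import datetime

def is_expired(marker_name, max_age_seconds, now=None):
    # marker_name is e.g. "ts-2024-12-01-04-38-18".
    stamp = marker_name[len("ts-"):]
    marked = datetime.datetime.strptime(stamp, "%Y-%m-%d-%H-%M-%S")
    now = now or datetime.datetime.utcnow()
    return (now - marked).total_seconds() > max_age_seconds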
[ 2976.063935] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e6767764-d589-41f9-abd0-806c3f46c0a0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2976.064159] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/f4e9cae5-d47d-4ca9-b40e-dad0047fa038" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2976.064338] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/f4e9cae5-d47d-4ca9-b40e-dad0047fa038" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2976.064646] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f4e9cae5-d47d-4ca9-b40e-dad0047fa038" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2976.064866] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-911bb06a-df10-40c4-a3ee-05a905f7fd84 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.068725] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2976.068725] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52359e85-248a-907c-0407-b26826c9683c" [ 2976.068725] env[61663]: _type = "Task" [ 2976.068725] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2976.076144] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52359e85-248a-907c-0407-b26826c9683c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2976.578647] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52359e85-248a-907c-0407-b26826c9683c, 'name': SearchDatastore_Task, 'duration_secs': 0.007336} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2976.578943] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/f4e9cae5-d47d-4ca9-b40e-dad0047fa038/ts-2024-12-01-04-38-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2976.579174] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb77d463-c1b0-46ad-a0d2-93d364a2267e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.590341] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/f4e9cae5-d47d-4ca9-b40e-dad0047fa038/ts-2024-12-01-04-38-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2976.590487] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image f4e9cae5-d47d-4ca9-b40e-dad0047fa038 is no longer used by this node. Pending deletion! [ 2976.590644] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/f4e9cae5-d47d-4ca9-b40e-dad0047fa038" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2976.590853] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/492abefa-e9e7-4339-8c76-8aa07c82100a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2976.590973] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/492abefa-e9e7-4339-8c76-8aa07c82100a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2976.591295] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/492abefa-e9e7-4339-8c76-8aa07c82100a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2976.591525] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d8af8b3-0aee-4869-96b0-f6d7027a80e2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.595293] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2976.595293] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b61b1b-4dc8-8c03-5ef6-60c521fd63ed" [ 2976.595293] env[61663]: _type = "Task" [ 2976.595293] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2976.602399] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b61b1b-4dc8-8c03-5ef6-60c521fd63ed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2977.105216] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b61b1b-4dc8-8c03-5ef6-60c521fd63ed, 'name': SearchDatastore_Task, 'duration_secs': 0.007425} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2977.105491] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/492abefa-e9e7-4339-8c76-8aa07c82100a/ts-2024-12-01-04-38-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2977.105746] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5cd422c-559f-4baa-8e3b-993d909e15d5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2977.117247] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/492abefa-e9e7-4339-8c76-8aa07c82100a/ts-2024-12-01-04-38-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2977.117398] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 492abefa-e9e7-4339-8c76-8aa07c82100a is no longer used by this node. Pending deletion! [ 2977.117565] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/492abefa-e9e7-4339-8c76-8aa07c82100a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2977.117780] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/74fc175e-dd20-42f8-b460-993f91387ecc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2977.117899] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/74fc175e-dd20-42f8-b460-993f91387ecc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2977.118232] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/74fc175e-dd20-42f8-b460-993f91387ecc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2977.118465] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec453829-2eb5-456c-a462-02c567014572 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2977.123018] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2977.123018] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bbe008-c0a7-6326-5550-00ae174a5152" [ 2977.123018] env[61663]: _type = "Task" [ 2977.123018] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2977.130206] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bbe008-c0a7-6326-5550-00ae174a5152, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2977.635446] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bbe008-c0a7-6326-5550-00ae174a5152, 'name': SearchDatastore_Task, 'duration_secs': 0.007583} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2977.635730] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/74fc175e-dd20-42f8-b460-993f91387ecc/ts-2024-12-01-04-38-23 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2977.636000] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ada9cf0-9e38-49f1-b9a7-07db8a115762 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2977.646870] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/74fc175e-dd20-42f8-b460-993f91387ecc/ts-2024-12-01-04-38-23 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2977.647053] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 74fc175e-dd20-42f8-b460-993f91387ecc is no longer used by this node. Pending deletion! 
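Editor's note: when an expired ts- marker is found, the whole cached image folder is removed, as in the DeleteDatastoreFile_Task records for image 39b899f7-... just below. A minimal sketch of that step, assuming a hypothetical session wrapper whose delete_datastore_file() issues FileManager.DeleteDatastoreFile_Task:

def delete_expired(session, cache_dir, image_id):
    path = "%s/%s" % (cache_dir, image_id)
    task = session.delete_datastore_file(path)  # DeleteDatastoreFile_Task
    session.wait_for_task(task)                 # polled like the tasks above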
[ 2977.647223] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/74fc175e-dd20-42f8-b460-993f91387ecc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2977.647463] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4e7f62b8-7536-4e97-a5a2-44ddc1f61c47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2977.647556] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4e7f62b8-7536-4e97-a5a2-44ddc1f61c47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2977.647863] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4e7f62b8-7536-4e97-a5a2-44ddc1f61c47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2977.648112] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f43eb0e9-46d5-44b8-9bd3-a2033dd798a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2977.652134] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2977.652134] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5212e71e-6172-536b-cbc4-93ba6942c540"
[ 2977.652134] env[61663]: _type = "Task"
[ 2977.652134] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2977.659102] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5212e71e-6172-536b-cbc4-93ba6942c540, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2978.163271] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5212e71e-6172-536b-cbc4-93ba6942c540, 'name': SearchDatastore_Task, 'duration_secs': 0.007071} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2978.163538] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4e7f62b8-7536-4e97-a5a2-44ddc1f61c47/ts-2024-12-01-04-38-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2978.163793] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29a5fa49-8392-4e2a-a376-b065a826f6b6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2978.175237] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4e7f62b8-7536-4e97-a5a2-44ddc1f61c47/ts-2024-12-01-04-38-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2978.175382] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4e7f62b8-7536-4e97-a5a2-44ddc1f61c47 is no longer used by this node. Pending deletion!
[ 2978.175545] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4e7f62b8-7536-4e97-a5a2-44ddc1f61c47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2978.175757] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/39b899f7-5a37-412c-9f79-91916c6dd28b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2978.175877] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/39b899f7-5a37-412c-9f79-91916c6dd28b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2978.176199] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/39b899f7-5a37-412c-9f79-91916c6dd28b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2978.176436] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3660f3de-83ad-4c1e-9074-fedfa91af1f6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2978.180440] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2978.180440] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520a4ae4-8ec3-c1d0-7445-7c809e9e0be2"
[ 2978.180440] env[61663]: _type = "Task"
[ 2978.180440] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2978.187362] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520a4ae4-8ec3-c1d0-7445-7c809e9e0be2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2978.691133] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520a4ae4-8ec3-c1d0-7445-7c809e9e0be2, 'name': SearchDatastore_Task, 'duration_secs': 0.008187} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2978.691473] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/39b899f7-5a37-412c-9f79-91916c6dd28b is no longer used. Deleting!
[ 2978.691663] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/39b899f7-5a37-412c-9f79-91916c6dd28b {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2978.691948] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76f42e21-55ea-4fed-83d9-ffd943afce82 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2978.697828] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2978.697828] env[61663]: value = "task-1690928"
[ 2978.697828] env[61663]: _type = "Task"
[ 2978.697828] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2978.704935] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690928, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2979.208027] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690928, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099679} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2979.208350] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2979.208580] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/39b899f7-5a37-412c-9f79-91916c6dd28b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2979.208838] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/5e723e1b-1c41-4f3f-9f34-cbb2f2679096" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2979.208987] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/5e723e1b-1c41-4f3f-9f34-cbb2f2679096" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2979.209369] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5e723e1b-1c41-4f3f-9f34-cbb2f2679096" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2979.209670] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed513059-2603-4c45-9c6d-5a726c4f74ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2979.214017] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2979.214017] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528f695c-33d5-1059-3ce6-74f2731c4a76"
[ 2979.214017] env[61663]: _type = "Task"
[ 2979.214017] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2979.221335] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528f695c-33d5-1059-3ce6-74f2731c4a76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2979.724856] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528f695c-33d5-1059-3ce6-74f2731c4a76, 'name': SearchDatastore_Task, 'duration_secs': 0.008275} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2979.725207] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/5e723e1b-1c41-4f3f-9f34-cbb2f2679096/ts-2024-12-01-04-38-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2979.725479] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2702b90-116d-4a82-977a-1ac3e84e0e47 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2979.736588] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/5e723e1b-1c41-4f3f-9f34-cbb2f2679096/ts-2024-12-01-04-38-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2979.736736] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 5e723e1b-1c41-4f3f-9f34-cbb2f2679096 is no longer used by this node. Pending deletion!
[ 2979.736899] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/5e723e1b-1c41-4f3f-9f34-cbb2f2679096" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2979.737284] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/c544af9d-51f5-44c6-aab8-9d806eb42a31" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2979.737284] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/c544af9d-51f5-44c6-aab8-9d806eb42a31" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2979.737579] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c544af9d-51f5-44c6-aab8-9d806eb42a31" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2979.737824] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60f2010b-0366-48c7-b04f-24f0ccbd4fa1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2979.741983] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2979.741983] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ea71e7-0359-8bd9-45ef-d02f991d4feb"
[ 2979.741983] env[61663]: _type = "Task"
[ 2979.741983] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2979.749058] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ea71e7-0359-8bd9-45ef-d02f991d4feb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2980.253061] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ea71e7-0359-8bd9-45ef-d02f991d4feb, 'name': SearchDatastore_Task, 'duration_secs': 0.008391} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2980.253436] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/c544af9d-51f5-44c6-aab8-9d806eb42a31 is no longer used. Deleting!
[ 2980.253605] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/c544af9d-51f5-44c6-aab8-9d806eb42a31 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2980.253902] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-499b6ed0-1c20-4d5e-901c-b0b0ea7dfc6d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2980.260524] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2980.260524] env[61663]: value = "task-1690929"
[ 2980.260524] env[61663]: _type = "Task"
[ 2980.260524] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2980.268300] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690929, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2980.770422] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690929, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096677} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2980.770769] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2980.770807] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/c544af9d-51f5-44c6-aab8-9d806eb42a31" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2980.771030] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e1e24422-b6d2-476a-997c-018b7cd6b067" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2980.771156] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e1e24422-b6d2-476a-997c-018b7cd6b067" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2980.771469] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e1e24422-b6d2-476a-997c-018b7cd6b067" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2980.771733] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79f9fd76-751b-409e-ad2f-a96894e8348c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2980.776118] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2980.776118] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ba01ce-449e-28d3-1f8f-434e77b8ddf6"
[ 2980.776118] env[61663]: _type = "Task"
[ 2980.776118] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2980.783347] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ba01ce-449e-28d3-1f8f-434e77b8ddf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2981.287129] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ba01ce-449e-28d3-1f8f-434e77b8ddf6, 'name': SearchDatastore_Task, 'duration_secs': 0.008042} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2981.287129] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e1e24422-b6d2-476a-997c-018b7cd6b067/ts-2024-12-01-04-38-27 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2981.287390] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53f14a7e-d78f-4600-8e97-d6ddea6593b9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2981.297991] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e1e24422-b6d2-476a-997c-018b7cd6b067/ts-2024-12-01-04-38-27 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2981.298154] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e1e24422-b6d2-476a-997c-018b7cd6b067 is no longer used by this node. Pending deletion!
[ 2981.298315] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e1e24422-b6d2-476a-997c-018b7cd6b067" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2981.298532] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/b799e0ba-4624-452d-a6f2-e69a7ddcac2a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2981.298651] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/b799e0ba-4624-452d-a6f2-e69a7ddcac2a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2981.298955] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b799e0ba-4624-452d-a6f2-e69a7ddcac2a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2981.299196] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c7d8c56-b1a1-49a2-9e28-8471397b0b0f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2981.303114] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2981.303114] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527ae063-d638-b518-60e5-797046dec3c4"
[ 2981.303114] env[61663]: _type = "Task"
[ 2981.303114] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2981.310063] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527ae063-d638-b518-60e5-797046dec3c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2981.813819] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527ae063-d638-b518-60e5-797046dec3c4, 'name': SearchDatastore_Task, 'duration_secs': 0.00808} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2981.814211] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/b799e0ba-4624-452d-a6f2-e69a7ddcac2a/ts-2024-12-01-04-38-27 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2981.814348] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94f72f81-7053-4ccf-8558-9e2d3cc3bd5d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2981.825756] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/b799e0ba-4624-452d-a6f2-e69a7ddcac2a/ts-2024-12-01-04-38-27 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2981.825908] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image b799e0ba-4624-452d-a6f2-e69a7ddcac2a is no longer used by this node. Pending deletion!
[ 2981.826084] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/b799e0ba-4624-452d-a6f2-e69a7ddcac2a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2981.826313] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2981.826429] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2981.826738] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2981.826977] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46897513-1bb4-4bc3-9aa5-00e31f0a4d4d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2981.831042] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2981.831042] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5219d732-d1dc-8075-c68e-e78ffcbca8e7"
[ 2981.831042] env[61663]: _type = "Task"
[ 2981.831042] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2981.838091] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5219d732-d1dc-8075-c68e-e78ffcbca8e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2982.341876] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5219d732-d1dc-8075-c68e-e78ffcbca8e7, 'name': SearchDatastore_Task, 'duration_secs': 0.007103} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2982.342159] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2/ts-2024-12-01-04-38-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2982.342422] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc646cd4-b13b-4efb-87d3-9ace965e0163 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2982.353365] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2/ts-2024-12-01-04-38-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2982.353483] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 59db50ba-652d-47ec-87e4-58b1bc08d0b2 is no longer used by this node. Pending deletion!
[ 2982.353594] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2982.353802] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/99fec921-2da8-42e5-b5dd-36eb6c744ad1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2982.353920] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/99fec921-2da8-42e5-b5dd-36eb6c744ad1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2982.354242] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/99fec921-2da8-42e5-b5dd-36eb6c744ad1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2982.354511] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1d94476-6319-4301-8a30-41df4d110818 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2982.358413] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2982.358413] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526607c2-02db-6c71-51ba-0e2c47d17e34"
[ 2982.358413] env[61663]: _type = "Task"
[ 2982.358413] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2982.365953] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526607c2-02db-6c71-51ba-0e2c47d17e34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2982.869127] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526607c2-02db-6c71-51ba-0e2c47d17e34, 'name': SearchDatastore_Task, 'duration_secs': 0.007914} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2982.869513] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/99fec921-2da8-42e5-b5dd-36eb6c744ad1/ts-2024-12-01-04-38-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2982.869642] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66ed917c-56a5-42f0-b576-8380cb471495 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2982.880890] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/99fec921-2da8-42e5-b5dd-36eb6c744ad1/ts-2024-12-01-04-38-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2982.881051] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 99fec921-2da8-42e5-b5dd-36eb6c744ad1 is no longer used by this node. Pending deletion!
[ 2982.881221] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/99fec921-2da8-42e5-b5dd-36eb6c744ad1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2982.881438] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/fa91c154-a109-4841-8672-3dbbbb0a3112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2982.881787] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/fa91c154-a109-4841-8672-3dbbbb0a3112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2982.881891] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/fa91c154-a109-4841-8672-3dbbbb0a3112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2982.882133] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97f869f6-17d1-4c64-994e-f40b4dd78126 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2982.886130] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2982.886130] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f3830c-5bf6-ba9a-a05b-d83a53d06037"
[ 2982.886130] env[61663]: _type = "Task"
[ 2982.886130] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2982.893381] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f3830c-5bf6-ba9a-a05b-d83a53d06037, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2983.396503] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f3830c-5bf6-ba9a-a05b-d83a53d06037, 'name': SearchDatastore_Task, 'duration_secs': 0.007292} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2983.396735] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/fa91c154-a109-4841-8672-3dbbbb0a3112/ts-2024-12-01-04-38-29 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2983.396987] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e57145d4-608f-4bcb-a7ca-b290505bedd2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2983.407973] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/fa91c154-a109-4841-8672-3dbbbb0a3112/ts-2024-12-01-04-38-29 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2983.408136] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image fa91c154-a109-4841-8672-3dbbbb0a3112 is no longer used by this node. Pending deletion!
[ 2983.408280] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/fa91c154-a109-4841-8672-3dbbbb0a3112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2983.408487] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/1f9181b1-8871-48cd-86a4-49e15b93a154" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2983.408604] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/1f9181b1-8871-48cd-86a4-49e15b93a154" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2983.408905] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1f9181b1-8871-48cd-86a4-49e15b93a154" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2983.409138] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed034210-bb64-43c0-aafa-751a2fe9fe75 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2983.413062] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2983.413062] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d8a0a9-53c6-abaa-6c26-32a556af9b74"
[ 2983.413062] env[61663]: _type = "Task"
[ 2983.413062] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2983.419758] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d8a0a9-53c6-abaa-6c26-32a556af9b74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2983.923229] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d8a0a9-53c6-abaa-6c26-32a556af9b74, 'name': SearchDatastore_Task, 'duration_secs': 0.007211} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2983.923548] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/1f9181b1-8871-48cd-86a4-49e15b93a154/ts-2024-12-01-04-38-29 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2983.923752] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8a8d6d0-7d9e-45f8-a2cf-f4e9c793c340 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2983.934416] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/1f9181b1-8871-48cd-86a4-49e15b93a154/ts-2024-12-01-04-38-29 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2983.934623] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 1f9181b1-8871-48cd-86a4-49e15b93a154 is no longer used by this node. Pending deletion!
[ 2983.934716] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/1f9181b1-8871-48cd-86a4-49e15b93a154" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2983.934932] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/576baaf5-a737-486d-bf36-13a254a62f66" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2983.935098] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/576baaf5-a737-486d-bf36-13a254a62f66" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2983.935409] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/576baaf5-a737-486d-bf36-13a254a62f66" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2983.935636] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c263a3f-caa3-451b-b325-8d5b2b44166c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2983.939604] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2983.939604] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b6ec90-f070-0d10-af6f-18ca7d70f8d9"
[ 2983.939604] env[61663]: _type = "Task"
[ 2983.939604] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2983.947097] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b6ec90-f070-0d10-af6f-18ca7d70f8d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2984.450701] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b6ec90-f070-0d10-af6f-18ca7d70f8d9, 'name': SearchDatastore_Task, 'duration_secs': 0.008495} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2984.450956] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/576baaf5-a737-486d-bf36-13a254a62f66 is no longer used. Deleting!
[ 2984.451118] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/576baaf5-a737-486d-bf36-13a254a62f66 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2984.451381] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f28a23ef-047f-4776-8b85-f55eadd5283e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2984.457159] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2984.457159] env[61663]: value = "task-1690930"
[ 2984.457159] env[61663]: _type = "Task"
[ 2984.457159] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2984.465014] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690930, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2984.967548] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690930, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099841} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2984.967927] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2984.967927] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/576baaf5-a737-486d-bf36-13a254a62f66" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2984.968165] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2984.968265] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2984.968637] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2984.968915] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83007e1f-2717-4809-9542-633dcba2f1fc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2984.973371] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2984.973371] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5218b315-3686-2afc-0aa4-243098de844a"
[ 2984.973371] env[61663]: _type = "Task"
[ 2984.973371] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2984.980563] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5218b315-3686-2afc-0aa4-243098de844a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2985.484207] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5218b315-3686-2afc-0aa4-243098de844a, 'name': SearchDatastore_Task, 'duration_secs': 0.00799} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2985.484484] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10/ts-2024-12-01-04-38-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2985.484745] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb69292d-7ca5-46dd-ab0a-4e31de711ca1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2985.495991] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10/ts-2024-12-01-04-38-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2985.496155] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image a8667823-58e2-446a-842c-c0a7cfc4db10 is no longer used by this node. Pending deletion!
[ 2985.496333] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/a8667823-58e2-446a-842c-c0a7cfc4db10" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2985.496551] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2985.496670] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2985.496968] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2985.497238] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a7e4a27-a20e-45bd-9641-66685771344d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2985.501327] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2985.501327] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5276ba01-10de-0009-d6a2-10ba9d346708"
[ 2985.501327] env[61663]: _type = "Task"
[ 2985.501327] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2985.508558] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5276ba01-10de-0009-d6a2-10ba9d346708, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2986.011345] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5276ba01-10de-0009-d6a2-10ba9d346708, 'name': SearchDatastore_Task, 'duration_secs': 0.007261} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2986.011663] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19/ts-2024-12-01-04-38-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2986.011865] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b770efd0-4a29-4ebc-bb75-2cbe10a11282 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2986.022934] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19/ts-2024-12-01-04-38-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2986.023085] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8c397d6a-2255-40a1-a544-a5131382ed19 is no longer used by this node. Pending deletion!
[ 2986.023251] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2986.023466] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/0d84a0cd-f7b3-4ccd-aa60-88bf17d55047" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2986.023584] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/0d84a0cd-f7b3-4ccd-aa60-88bf17d55047" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2986.023896] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0d84a0cd-f7b3-4ccd-aa60-88bf17d55047" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2986.024128] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e23f6232-9994-4d20-90ee-042c0781909e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2986.028443] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2986.028443] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a14a19-e7e0-e30c-7409-c2d99b84d5a7"
[ 2986.028443] env[61663]: _type = "Task"
[ 2986.028443] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2986.035660] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a14a19-e7e0-e30c-7409-c2d99b84d5a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2986.539525] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a14a19-e7e0-e30c-7409-c2d99b84d5a7, 'name': SearchDatastore_Task, 'duration_secs': 0.007301} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2986.539804] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/0d84a0cd-f7b3-4ccd-aa60-88bf17d55047/ts-2024-12-01-04-38-32 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2986.540068] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc43c841-f295-4a2d-96e7-10014f8fa1b2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2986.551831] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/0d84a0cd-f7b3-4ccd-aa60-88bf17d55047/ts-2024-12-01-04-38-32 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2986.551978] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 0d84a0cd-f7b3-4ccd-aa60-88bf17d55047 is no longer used by this node. Pending deletion!
[ 2986.552157] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/0d84a0cd-f7b3-4ccd-aa60-88bf17d55047" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2986.552382] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2986.552503] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2986.552875] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2986.553135] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6987dd35-c0ba-4906-8c2d-3b182cdf08e6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2986.557225] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2986.557225] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ccab33-082d-bf99-d6eb-147e8b31cf28"
[ 2986.557225] env[61663]: _type = "Task"
[ 2986.557225] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2986.564308] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ccab33-082d-bf99-d6eb-147e8b31cf28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2987.067224] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ccab33-082d-bf99-d6eb-147e8b31cf28, 'name': SearchDatastore_Task, 'duration_secs': 0.008931} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2987.067517] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754/ts-2024-12-01-04-38-32 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2987.067758] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4050e098-ba3f-4281-b1e1-5b54057b0591 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2987.078552] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754/ts-2024-12-01-04-38-32 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2987.078693] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4984054a-3a6f-44a8-99e9-437408028754 is no longer used by this node. Pending deletion!
[ 2987.078835] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4984054a-3a6f-44a8-99e9-437408028754" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2987.079061] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3547d9fe-f05d-4a3b-83a4-92132625a5e1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2987.079217] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3547d9fe-f05d-4a3b-83a4-92132625a5e1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2987.079526] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3547d9fe-f05d-4a3b-83a4-92132625a5e1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2987.079755] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7282d8a1-45d0-4774-97db-152d794b6f5a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2987.083855] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2987.083855] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522fab9b-a416-474b-b60a-71f4331b42bf"
[ 2987.083855] env[61663]: _type = "Task"
[ 2987.083855] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2987.090688] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522fab9b-a416-474b-b60a-71f4331b42bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2987.594943] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522fab9b-a416-474b-b60a-71f4331b42bf, 'name': SearchDatastore_Task, 'duration_secs': 0.007482} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2987.595253] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3547d9fe-f05d-4a3b-83a4-92132625a5e1/ts-2024-12-01-04-38-33 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2987.595514] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f3f3f47-7e02-48b2-b1dd-f0b0f562e543 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2987.606392] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3547d9fe-f05d-4a3b-83a4-92132625a5e1/ts-2024-12-01-04-38-33 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2987.606534] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3547d9fe-f05d-4a3b-83a4-92132625a5e1 is no longer used by this node. Pending deletion!
[ 2987.606679] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3547d9fe-f05d-4a3b-83a4-92132625a5e1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2987.606887] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/85e635b0-17f0-4a4c-8b6c-b169f397e4d3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2987.607014] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/85e635b0-17f0-4a4c-8b6c-b169f397e4d3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2987.607346] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/85e635b0-17f0-4a4c-8b6c-b169f397e4d3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2987.607573] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bd75361-0264-44d2-b732-a1053e9f1563 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2987.611340] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2987.611340] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a86001-071b-002a-c1d6-fbc3d86bc667"
[ 2987.611340] env[61663]: _type = "Task"
[ 2987.611340] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2987.618200] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a86001-071b-002a-c1d6-fbc3d86bc667, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2988.122349] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a86001-071b-002a-c1d6-fbc3d86bc667, 'name': SearchDatastore_Task, 'duration_secs': 0.008353} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2988.122667] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/85e635b0-17f0-4a4c-8b6c-b169f397e4d3 is no longer used. Deleting!
[ 2988.122812] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/85e635b0-17f0-4a4c-8b6c-b169f397e4d3 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2988.123082] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-593fce53-c516-4764-a5b1-375146f825ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2988.129240] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2988.129240] env[61663]: value = "task-1690931"
[ 2988.129240] env[61663]: _type = "Task"
[ 2988.129240] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2988.136963] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690931, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2988.638800] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690931, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104304} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2988.639028] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2988.639203] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/85e635b0-17f0-4a4c-8b6c-b169f397e4d3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2988.639419] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/0bfeb2a5-7cb2-48f5-a73d-cd9a428d0490" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2988.639554] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/0bfeb2a5-7cb2-48f5-a73d-cd9a428d0490" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2988.639878] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0bfeb2a5-7cb2-48f5-a73d-cd9a428d0490" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2988.640150] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ecc7bd0-c8e0-4bc8-9e0a-20b06be44d11 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2988.644175] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2988.644175] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52317527-e007-93df-6488-dedbc67c5cfb"
[ 2988.644175] env[61663]: _type = "Task"
[ 2988.644175] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2988.651555] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52317527-e007-93df-6488-dedbc67c5cfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2989.154516] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52317527-e007-93df-6488-dedbc67c5cfb, 'name': SearchDatastore_Task, 'duration_secs': 0.008319} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2989.154887] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/0bfeb2a5-7cb2-48f5-a73d-cd9a428d0490/ts-2024-12-01-04-38-35 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2989.155045] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-982da2f3-cb37-4a33-ab04-c13884f9f56f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2989.166092] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/0bfeb2a5-7cb2-48f5-a73d-cd9a428d0490/ts-2024-12-01-04-38-35 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2989.166266] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 0bfeb2a5-7cb2-48f5-a73d-cd9a428d0490 is no longer used by this node. Pending deletion!
[ 2989.166415] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/0bfeb2a5-7cb2-48f5-a73d-cd9a428d0490" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2989.166630] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/9e86e58c-0d05-49a6-8b25-facff2446110" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2989.166753] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/9e86e58c-0d05-49a6-8b25-facff2446110" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2989.167093] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/9e86e58c-0d05-49a6-8b25-facff2446110" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2989.167360] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ae6686f-1508-47cd-9aaf-5cba4d1b65a5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2989.171238] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2989.171238] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522ffec5-ede9-e67b-c2aa-fa454c5c50c6"
[ 2989.171238] env[61663]: _type = "Task"
[ 2989.171238] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2989.178180] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522ffec5-ede9-e67b-c2aa-fa454c5c50c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2989.682600] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522ffec5-ede9-e67b-c2aa-fa454c5c50c6, 'name': SearchDatastore_Task, 'duration_secs': 0.00871} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2989.682960] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/9e86e58c-0d05-49a6-8b25-facff2446110 is no longer used. Deleting!
[ 2989.683134] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/9e86e58c-0d05-49a6-8b25-facff2446110 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2989.683397] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-299fc35a-e6d6-4714-8b34-0dd6f9793c0b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2989.689125] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2989.689125] env[61663]: value = "task-1690932"
[ 2989.689125] env[61663]: _type = "Task"
[ 2989.689125] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2989.696325] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690932, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2990.200131] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107007} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2990.200463] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2990.200641] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/9e86e58c-0d05-49a6-8b25-facff2446110" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2990.200949] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d84a70de-6828-451e-b4ab-dc62d638ece0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2990.201171] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d84a70de-6828-451e-b4ab-dc62d638ece0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2990.201528] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d84a70de-6828-451e-b4ab-dc62d638ece0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2990.201852] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48d66edc-34f7-4e41-95ef-3fcee919200b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2990.206381] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2990.206381] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527c4992-e93d-786b-fa32-ceee36ed2ea0"
[ 2990.206381] env[61663]: _type = "Task"
[ 2990.206381] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2990.215431] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527c4992-e93d-786b-fa32-ceee36ed2ea0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2990.717414] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527c4992-e93d-786b-fa32-ceee36ed2ea0, 'name': SearchDatastore_Task, 'duration_secs': 0.008404} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2990.717772] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d84a70de-6828-451e-b4ab-dc62d638ece0/ts-2024-12-01-04-38-36 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2990.718087] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-606af874-7d79-48b7-bc19-0de92307d95d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2990.735700] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d84a70de-6828-451e-b4ab-dc62d638ece0/ts-2024-12-01-04-38-36 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2990.735921] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d84a70de-6828-451e-b4ab-dc62d638ece0 is no longer used by this node. Pending deletion!
[ 2990.736147] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d84a70de-6828-451e-b4ab-dc62d638ece0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2990.736544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/c4e08faa-bad2-4c35-9c93-50c4732584e4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2990.736661] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/c4e08faa-bad2-4c35-9c93-50c4732584e4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2990.737078] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c4e08faa-bad2-4c35-9c93-50c4732584e4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2990.737372] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed3af200-6d91-4026-be2c-929955ca328e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2990.742281] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2990.742281] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527a6765-318f-f747-3f0f-0b790254d215"
[ 2990.742281] env[61663]: _type = "Task"
[ 2990.742281] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2990.749429] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527a6765-318f-f747-3f0f-0b790254d215, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2991.253137] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527a6765-318f-f747-3f0f-0b790254d215, 'name': SearchDatastore_Task, 'duration_secs': 0.008011} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2991.253499] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/c4e08faa-bad2-4c35-9c93-50c4732584e4/ts-2024-12-01-04-38-37 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2991.253719] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45a36834-5425-4499-9167-d768ab0bd63a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2991.265120] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/c4e08faa-bad2-4c35-9c93-50c4732584e4/ts-2024-12-01-04-38-37 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2991.265334] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image c4e08faa-bad2-4c35-9c93-50c4732584e4 is no longer used by this node. Pending deletion!
[ 2991.265533] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/c4e08faa-bad2-4c35-9c93-50c4732584e4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2991.265778] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6580f87e-0909-491b-9704-98df07738ec5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2991.265928] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6580f87e-0909-491b-9704-98df07738ec5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2991.266295] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6580f87e-0909-491b-9704-98df07738ec5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2991.266563] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f08236b8-7572-45a7-8cdb-56f7e07cb89b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2991.270459] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2991.270459] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c92fd5-2082-a875-ee20-132f4b974764"
[ 2991.270459] env[61663]: _type = "Task"
[ 2991.270459] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2991.277499] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c92fd5-2082-a875-ee20-132f4b974764, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2991.780654] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c92fd5-2082-a875-ee20-132f4b974764, 'name': SearchDatastore_Task, 'duration_secs': 0.00783} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2991.780863] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/6580f87e-0909-491b-9704-98df07738ec5/ts-2024-12-01-04-38-37 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2991.781163] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c128a0c7-fdbc-471a-8669-d646a647c206 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2991.793327] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/6580f87e-0909-491b-9704-98df07738ec5/ts-2024-12-01-04-38-37 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2991.793477] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 6580f87e-0909-491b-9704-98df07738ec5 is no longer used by this node. Pending deletion!
[ 2991.793641] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6580f87e-0909-491b-9704-98df07738ec5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2991.793864] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8ec93660-8f43-4738-9c98-3de931b39e78" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2991.793985] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8ec93660-8f43-4738-9c98-3de931b39e78" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2991.794347] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8ec93660-8f43-4738-9c98-3de931b39e78" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2991.794606] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47e4703f-ff0f-42d8-9d8a-6ce4b07f545d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2991.798830] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2991.798830] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52035cf7-a0e3-a309-35b4-36ed491ad6a9"
[ 2991.798830] env[61663]: _type = "Task"
[ 2991.798830] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2991.806909] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52035cf7-a0e3-a309-35b4-36ed491ad6a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2992.308341] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52035cf7-a0e3-a309-35b4-36ed491ad6a9, 'name': SearchDatastore_Task, 'duration_secs': 0.008038} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2992.308660] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8ec93660-8f43-4738-9c98-3de931b39e78/ts-2024-12-01-04-38-38 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2992.308870] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90be9545-7376-4541-972f-89d84f185ec2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2992.320680] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8ec93660-8f43-4738-9c98-3de931b39e78/ts-2024-12-01-04-38-38 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2992.320827] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8ec93660-8f43-4738-9c98-3de931b39e78 is no longer used by this node. Pending deletion!
[ 2992.320982] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8ec93660-8f43-4738-9c98-3de931b39e78" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2992.321219] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6f852035-2c43-45e9-aea1-c144d367aff5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2992.321372] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6f852035-2c43-45e9-aea1-c144d367aff5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2992.321712] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6f852035-2c43-45e9-aea1-c144d367aff5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2992.321946] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90564e98-d578-4d45-b5fd-0667c425140b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2992.325885] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2992.325885] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526af19b-15ef-1c5b-6e75-9315b1c81722"
[ 2992.325885] env[61663]: _type = "Task"
[ 2992.325885] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2992.332957] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526af19b-15ef-1c5b-6e75-9315b1c81722, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2992.835853] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526af19b-15ef-1c5b-6e75-9315b1c81722, 'name': SearchDatastore_Task, 'duration_secs': 0.008002} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2992.836073] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/6f852035-2c43-45e9-aea1-c144d367aff5/ts-2024-12-01-04-38-38 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2992.836334] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7fdd21c-e731-4159-81aa-7579d484e07a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2992.847479] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/6f852035-2c43-45e9-aea1-c144d367aff5/ts-2024-12-01-04-38-38 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2992.847648] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 6f852035-2c43-45e9-aea1-c144d367aff5 is no longer used by this node. Pending deletion!
[ 2992.847780] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6f852035-2c43-45e9-aea1-c144d367aff5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2992.847990] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2992.848127] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2992.848447] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2992.848687] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f64a0402-a6bb-4efb-87d2-d230099bafd1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2992.852636] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2992.852636] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cdc84f-5c7c-54ba-cdd8-157a7d7f80e4"
[ 2992.852636] env[61663]: _type = "Task"
[ 2992.852636] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2992.860366] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cdc84f-5c7c-54ba-cdd8-157a7d7f80e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2993.363629] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cdc84f-5c7c-54ba-cdd8-157a7d7f80e4, 'name': SearchDatastore_Task, 'duration_secs': 0.007593} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2993.364028] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430/ts-2024-12-01-04-38-39 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2993.364191] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af64ad62-f0eb-4707-8930-e7839f1442fd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2993.375475] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430/ts-2024-12-01-04-38-39 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2993.375627] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 1184635d-f5f6-42d1-80ac-b5a20a46b430 is no longer used by this node. Pending deletion!
[ 2993.375773] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2993.375987] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/06b6e007-a392-40ff-9785-38902a67f52a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2993.376195] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/06b6e007-a392-40ff-9785-38902a67f52a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2993.376517] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/06b6e007-a392-40ff-9785-38902a67f52a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2993.376752] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0e37a19-6eaf-46d5-896f-681094af09b7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2993.380952] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2993.380952] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525627cc-98b5-612d-d410-960c18024bb6"
[ 2993.380952] env[61663]: _type = "Task"
[ 2993.380952] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2993.387831] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525627cc-98b5-612d-d410-960c18024bb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2993.891272] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525627cc-98b5-612d-d410-960c18024bb6, 'name': SearchDatastore_Task, 'duration_secs': 0.007642} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2993.891614] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/06b6e007-a392-40ff-9785-38902a67f52a/ts-2024-12-01-04-38-39 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2993.891886] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53df1f73-3a24-4ca3-819b-9de83dcd1f78 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2993.903279] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/06b6e007-a392-40ff-9785-38902a67f52a/ts-2024-12-01-04-38-39 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2993.903390] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 06b6e007-a392-40ff-9785-38902a67f52a is no longer used by this node. Pending deletion!
[ 2993.903535] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/06b6e007-a392-40ff-9785-38902a67f52a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2993.903748] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/a154e8ef-8076-4d94-bd6c-998364f9dd33" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2993.903867] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/a154e8ef-8076-4d94-bd6c-998364f9dd33" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2993.904199] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a154e8ef-8076-4d94-bd6c-998364f9dd33" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2993.904440] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c61f67fe-88ad-4d80-aed4-ad1b2aecb532 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2993.908468] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2993.908468] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a81390-3050-a509-d38d-836b9adc86a4"
[ 2993.908468] env[61663]: _type = "Task"
[ 2993.908468] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2993.915687] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a81390-3050-a509-d38d-836b9adc86a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2994.418891] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a81390-3050-a509-d38d-836b9adc86a4, 'name': SearchDatastore_Task, 'duration_secs': 0.007568} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2994.419255] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/a154e8ef-8076-4d94-bd6c-998364f9dd33/ts-2024-12-01-04-38-40 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2994.419426] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2446de5-f200-4f7c-b61b-89e5407e1ec2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2994.430167] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/a154e8ef-8076-4d94-bd6c-998364f9dd33/ts-2024-12-01-04-38-40 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2994.430328] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image a154e8ef-8076-4d94-bd6c-998364f9dd33 is no longer used by this node. Pending deletion!
[ 2994.430463] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/a154e8ef-8076-4d94-bd6c-998364f9dd33" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2994.430685] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/32cc82c2-e0ad-43f7-92c5-1e02ba64def8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2994.430878] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/32cc82c2-e0ad-43f7-92c5-1e02ba64def8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2994.431215] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/32cc82c2-e0ad-43f7-92c5-1e02ba64def8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2994.431450] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee6b62e4-62ba-40de-a2f1-96bacbdedfc3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2994.435309] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2994.435309] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527cb4c9-db84-5386-1be8-185d2043a380"
[ 2994.435309] env[61663]: _type = "Task"
[ 2994.435309] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2994.442133] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527cb4c9-db84-5386-1be8-185d2043a380, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2994.946028] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527cb4c9-db84-5386-1be8-185d2043a380, 'name': SearchDatastore_Task, 'duration_secs': 0.007117} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2994.946380] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/32cc82c2-e0ad-43f7-92c5-1e02ba64def8/ts-2024-12-01-04-38-40 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2994.946642] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4af6f5ba-05fd-46e4-ba8d-1f0b7ddd6971 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2994.957901] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/32cc82c2-e0ad-43f7-92c5-1e02ba64def8/ts-2024-12-01-04-38-40 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2994.958063] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 32cc82c2-e0ad-43f7-92c5-1e02ba64def8 is no longer used by this node. Pending deletion!
[ 2994.958244] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/32cc82c2-e0ad-43f7-92c5-1e02ba64def8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2994.958472] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/232e3e0b-526c-444f-bd7d-347d0b3bf0a1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2994.958605] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/232e3e0b-526c-444f-bd7d-347d0b3bf0a1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2994.958915] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/232e3e0b-526c-444f-bd7d-347d0b3bf0a1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2994.959174] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c77a41b5-8aa5-4aaa-b831-d5a89621e8a9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2994.963190] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2994.963190] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5215a7f6-3b12-5a51-8932-a205ddd68167"
[ 2994.963190] env[61663]: _type = "Task"
[ 2994.963190] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2994.970562] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5215a7f6-3b12-5a51-8932-a205ddd68167, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2995.473834] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5215a7f6-3b12-5a51-8932-a205ddd68167, 'name': SearchDatastore_Task, 'duration_secs': 0.008708} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2995.474195] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/232e3e0b-526c-444f-bd7d-347d0b3bf0a1 is no longer used. Deleting!
[ 2995.474304] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/232e3e0b-526c-444f-bd7d-347d0b3bf0a1 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2995.474604] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-487d6f17-8cde-45aa-a99a-fbb46e385001 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2995.480534] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2995.480534] env[61663]: value = "task-1690933"
[ 2995.480534] env[61663]: _type = "Task"
[ 2995.480534] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2995.488288] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2995.990996] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100595} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2995.991222] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2995.991396] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/232e3e0b-526c-444f-bd7d-347d0b3bf0a1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2995.991619] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3144c33b-2932-4a2a-8b37-1817882bd2c0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2995.991739] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3144c33b-2932-4a2a-8b37-1817882bd2c0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2995.992077] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3144c33b-2932-4a2a-8b37-1817882bd2c0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2995.992345] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0b70318-c9aa-492e-90cd-d0d2846baf06 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2995.996603] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 2995.996603] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52760f40-9126-dfb1-d43e-2369fcf8684f"
[ 2995.996603] env[61663]: _type = "Task"
[ 2995.996603] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2996.003780] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52760f40-9126-dfb1-d43e-2369fcf8684f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2996.508110] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52760f40-9126-dfb1-d43e-2369fcf8684f, 'name': SearchDatastore_Task, 'duration_secs': 0.008518} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2996.508110] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3144c33b-2932-4a2a-8b37-1817882bd2c0/ts-2024-12-01-04-38-42 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2996.508110] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e6d6cb4-63c6-457f-b8fe-6b314165efc8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2996.518582] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3144c33b-2932-4a2a-8b37-1817882bd2c0/ts-2024-12-01-04-38-42 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2996.518729] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3144c33b-2932-4a2a-8b37-1817882bd2c0 is no longer used by this node. Pending deletion!
[ 2996.518880] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3144c33b-2932-4a2a-8b37-1817882bd2c0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2996.519109] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e1f34ccd-056f-414b-a069-c54fcb610300" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2996.519231] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e1f34ccd-056f-414b-a069-c54fcb610300" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2996.519544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e1f34ccd-056f-414b-a069-c54fcb610300" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2996.519776] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f75623c-0274-488c-8c51-4d9a1178d07f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2996.523750] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2996.523750] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521aad1e-f069-9c06-2044-1e6c55971834" [ 2996.523750] env[61663]: _type = "Task" [ 2996.523750] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2996.530941] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521aad1e-f069-9c06-2044-1e6c55971834, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2997.034017] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521aad1e-f069-9c06-2044-1e6c55971834, 'name': SearchDatastore_Task, 'duration_secs': 0.007519} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2997.034523] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e1f34ccd-056f-414b-a069-c54fcb610300/ts-2024-12-01-04-38-42 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2997.034593] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fc8f57b-c2df-48f7-8529-fe7cfebd7fa9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2997.047238] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e1f34ccd-056f-414b-a069-c54fcb610300/ts-2024-12-01-04-38-42 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2997.047238] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e1f34ccd-056f-414b-a069-c54fcb610300 is no longer used by this node. Pending deletion! [ 2997.047238] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e1f34ccd-056f-414b-a069-c54fcb610300" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2997.047238] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/a7cb11ff-3aaf-4d49-8ac7-9896a8f11f0a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2997.047475] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/a7cb11ff-3aaf-4d49-8ac7-9896a8f11f0a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2997.047691] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a7cb11ff-3aaf-4d49-8ac7-9896a8f11f0a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2997.047951] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e6209c9-4341-4a17-b51f-81b2339f725d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2997.052380] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2997.052380] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525c3174-3225-71e2-0a4f-11e460f26b93" [ 2997.052380] env[61663]: _type = "Task" [ 2997.052380] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2997.059920] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525c3174-3225-71e2-0a4f-11e460f26b93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2997.563083] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525c3174-3225-71e2-0a4f-11e460f26b93, 'name': SearchDatastore_Task, 'duration_secs': 0.007526} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2997.563083] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/a7cb11ff-3aaf-4d49-8ac7-9896a8f11f0a/ts-2024-12-01-04-38-43 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2997.563508] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3308780e-e6b6-468e-b4b2-4b6639a5fdb9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2997.574616] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/a7cb11ff-3aaf-4d49-8ac7-9896a8f11f0a/ts-2024-12-01-04-38-43 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2997.574778] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image a7cb11ff-3aaf-4d49-8ac7-9896a8f11f0a is no longer used by this node. Pending deletion! [ 2997.574908] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/a7cb11ff-3aaf-4d49-8ac7-9896a8f11f0a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2997.575133] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/121de9a2-0f2d-476c-8d4f-3546d88f9a97" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2997.575262] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/121de9a2-0f2d-476c-8d4f-3546d88f9a97" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2997.575558] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/121de9a2-0f2d-476c-8d4f-3546d88f9a97" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2997.575780] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cbc0dc9-52d2-4ad5-bd21-199f61655845 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2997.579924] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2997.579924] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528f6198-9a46-dbfe-9510-88d36ca3c114" [ 2997.579924] env[61663]: _type = "Task" [ 2997.579924] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2997.587284] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528f6198-9a46-dbfe-9510-88d36ca3c114, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2998.089790] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528f6198-9a46-dbfe-9510-88d36ca3c114, 'name': SearchDatastore_Task, 'duration_secs': 0.007498} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2998.090087] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/121de9a2-0f2d-476c-8d4f-3546d88f9a97/ts-2024-12-01-04-38-43 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2998.090346] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0045b35b-f4d1-4358-be7d-bb1a45e7d19f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2998.101483] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/121de9a2-0f2d-476c-8d4f-3546d88f9a97/ts-2024-12-01-04-38-43 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2998.101609] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 121de9a2-0f2d-476c-8d4f-3546d88f9a97 is no longer used by this node. Pending deletion! 
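Annotation: the scan alternates between two outcomes for unused images — some entries are deleted outright ("is no longer used. Deleting!") while others only get a "ts-..." marker directory ("Pending deletion!"), to be removed once they have aged past a window. A local-filesystem sketch of that two-phase policy (the 24-hour window and helper layout are assumptions, not read from Nova's source):

import os
import shutil
import time

def age_unused_entry(entry, max_age_seconds=24 * 3600):
    # Phase 1: no marker yet -> create one ("Pending deletion!").
    # Phase 2: marker older than the window -> remove the entry
    # ("is no longer used. Deleting!").
    markers = [d for d in os.listdir(entry) if d.startswith("ts-")]
    if not markers:
        os.makedirs(os.path.join(entry, time.strftime("ts-%Y-%m-%d-%H-%M-%S")))
    elif time.time() - os.path.getmtime(os.path.join(entry, markers[0])) > max_age_seconds:
        shutil.rmtree(entry)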
[ 2998.101778] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/121de9a2-0f2d-476c-8d4f-3546d88f9a97" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2998.101989] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2998.102124] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2998.102445] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2998.102679] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-072dc8b0-dd37-470a-bc37-b65ba7069415 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2998.106959] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2998.106959] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5286b255-6775-a6ee-a11f-d0dbd98c1e3e" [ 2998.106959] env[61663]: _type = "Task" [ 2998.106959] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2998.113772] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5286b255-6775-a6ee-a11f-d0dbd98c1e3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2998.617487] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5286b255-6775-a6ee-a11f-d0dbd98c1e3e, 'name': SearchDatastore_Task, 'duration_secs': 0.008207} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2998.617809] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf/ts-2024-12-01-04-38-44 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2998.618057] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77640700-3b03-409c-84e6-cc20018762fb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2998.629351] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf/ts-2024-12-01-04-38-44 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2998.629496] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image f368dd1b-1132-4151-8b96-237e1f2fcaaf is no longer used by this node. Pending deletion! [ 2998.629651] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/f368dd1b-1132-4151-8b96-237e1f2fcaaf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2998.629866] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2a2dfc37-8042-4e10-9f38-33b72b803caf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2998.629985] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2a2dfc37-8042-4e10-9f38-33b72b803caf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2998.630308] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2a2dfc37-8042-4e10-9f38-33b72b803caf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2998.630534] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e414d841-eca1-4a38-a338-c024ed29c00a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2998.634261] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2998.634261] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522bef16-ec0d-1142-4dab-a5a186279739" [ 2998.634261] env[61663]: _type = "Task" [ 2998.634261] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2998.641086] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522bef16-ec0d-1142-4dab-a5a186279739, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2999.145461] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522bef16-ec0d-1142-4dab-a5a186279739, 'name': SearchDatastore_Task, 'duration_secs': 0.007616} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2999.145732] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/2a2dfc37-8042-4e10-9f38-33b72b803caf/ts-2024-12-01-04-38-44 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2999.145981] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80c161af-1a31-40ef-8143-a9fd8b36c13e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2999.157112] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/2a2dfc37-8042-4e10-9f38-33b72b803caf/ts-2024-12-01-04-38-44 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2999.157268] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 2a2dfc37-8042-4e10-9f38-33b72b803caf is no longer used by this node. Pending deletion! [ 2999.157434] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2a2dfc37-8042-4e10-9f38-33b72b803caf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2999.157659] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/450cf027-6a3a-43f1-b7b2-def0b2c49c3c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2999.157783] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/450cf027-6a3a-43f1-b7b2-def0b2c49c3c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2999.158110] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/450cf027-6a3a-43f1-b7b2-def0b2c49c3c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2999.158344] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-126b8da8-3b7d-450e-ac93-293f0e667282 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2999.162471] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2999.162471] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521fc51e-e297-3992-db28-5e5c746bebc5" [ 2999.162471] env[61663]: _type = "Task" [ 2999.162471] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2999.169574] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521fc51e-e297-3992-db28-5e5c746bebc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2999.672793] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521fc51e-e297-3992-db28-5e5c746bebc5, 'name': SearchDatastore_Task, 'duration_secs': 0.007579} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2999.673127] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/450cf027-6a3a-43f1-b7b2-def0b2c49c3c/ts-2024-12-01-04-38-45 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2999.673366] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-454de5ce-cfa9-4ce5-83b0-958be596f011 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2999.685335] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/450cf027-6a3a-43f1-b7b2-def0b2c49c3c/ts-2024-12-01-04-38-45 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2999.685479] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 450cf027-6a3a-43f1-b7b2-def0b2c49c3c is no longer used by this node. Pending deletion! 
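Annotation: the marker directories created above encode the scan time down to the second (ts-2024-12-01-04-38-42 through -45 across consecutive entries). A small helper reproducing that naming, with the format inferred from the log rather than taken from Nova's source:

from datetime import datetime, timezone

def timestamp_folder_name(now=None):
    # Build a marker like "ts-2024-12-01-04-38-45"; entries scanned within
    # the same second would share a name, which MakeDirectory tolerates here.
    now = now or datetime.now(timezone.utc)
    return now.strftime("ts-%Y-%m-%d-%H-%M-%S")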
[ 2999.685694] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/450cf027-6a3a-43f1-b7b2-def0b2c49c3c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2999.685946] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7502ce33-662a-4969-8e19-43f8ff7f2123" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2999.686123] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7502ce33-662a-4969-8e19-43f8ff7f2123" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2999.686470] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7502ce33-662a-4969-8e19-43f8ff7f2123" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2999.686714] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f80f7af3-b86d-4d19-95e4-162497de3ef9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2999.690804] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 2999.690804] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5205f92d-532b-80e0-1909-ff3d989b0636" [ 2999.690804] env[61663]: _type = "Task" [ 2999.690804] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2999.697840] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5205f92d-532b-80e0-1909-ff3d989b0636, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3000.200725] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5205f92d-532b-80e0-1909-ff3d989b0636, 'name': SearchDatastore_Task, 'duration_secs': 0.007779} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3000.200990] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/7502ce33-662a-4969-8e19-43f8ff7f2123/ts-2024-12-01-04-38-46 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3000.201300] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79d48bf4-413b-4520-bf44-2122b31da4ef {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3000.212923] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/7502ce33-662a-4969-8e19-43f8ff7f2123/ts-2024-12-01-04-38-46 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3000.213095] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 7502ce33-662a-4969-8e19-43f8ff7f2123 is no longer used by this node. Pending deletion! [ 3000.213249] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7502ce33-662a-4969-8e19-43f8ff7f2123" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3000.213461] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/403fec99-4b23-46d8-acac-732262099a51" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3000.213627] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/403fec99-4b23-46d8-acac-732262099a51" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3000.213945] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/403fec99-4b23-46d8-acac-732262099a51" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3000.214190] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efdd419d-28df-416f-a417-52e2c8c83357 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3000.218480] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3000.218480] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521b3a43-e0e9-23cd-5149-5f6c14af6adc" [ 3000.218480] env[61663]: _type = "Task" [ 3000.218480] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3000.225689] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521b3a43-e0e9-23cd-5149-5f6c14af6adc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3000.730715] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521b3a43-e0e9-23cd-5149-5f6c14af6adc, 'name': SearchDatastore_Task, 'duration_secs': 0.008872} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3000.731126] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/403fec99-4b23-46d8-acac-732262099a51 is no longer used. Deleting! [ 3000.731223] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/403fec99-4b23-46d8-acac-732262099a51 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3000.731495] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c5ad52b-a33f-4026-9ed6-b9f8b2c71f3d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3000.737533] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3000.737533] env[61663]: value = "task-1690934" [ 3000.737533] env[61663]: _type = "Task" [ 3000.737533] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3000.745017] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690934, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3001.247810] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690934, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112303} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3001.248051] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3001.248234] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/403fec99-4b23-46d8-acac-732262099a51" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3001.248465] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3001.248640] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3001.248994] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3001.249278] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1c600a2-3fdf-461e-91e8-5fa15705f6e1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3001.253824] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3001.253824] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528eb045-0650-c367-f5a9-a7db28bd92fc" [ 3001.253824] env[61663]: _type = "Task" [ 3001.253824] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3001.261196] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528eb045-0650-c367-f5a9-a7db28bd92fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3001.764553] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528eb045-0650-c367-f5a9-a7db28bd92fc, 'name': SearchDatastore_Task, 'duration_secs': 0.008984} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3001.764882] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf is no longer used. Deleting! 
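Annotation: when an entry is actually deleted (task-1690934, task-1690935 above), the SearchDatastore check and the DeleteDatastoreFile_Task both run under the entry's lock, and the lock is released afterwards in every case. A sketch of that invariant with stand-in callables; a context manager keeps the release on the fault path too:

def delete_if_unused(entry, entry_lock, search, delete_file):
    # entry_lock is a context manager; search/delete_file stand in for
    # the SearchDatastore_Task and DeleteDatastoreFile_Task round trips.
    with entry_lock:
        if search(entry):
            delete_file(entry)  # completes in ~0.1 s in the log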
[ 3001.765035] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3001.765304] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c802a660-2c53-4754-997f-ee348d2105ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3001.773057] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3001.773057] env[61663]: value = "task-1690935" [ 3001.773057] env[61663]: _type = "Task" [ 3001.773057] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3001.780532] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690935, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3001.962556] env[61663]: WARNING oslo_vmware.rw_handles [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3001.962556] env[61663]: ERROR oslo_vmware.rw_handles [ 3001.963019] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/3891ba52-e8e5-4634-8ef9-b08fedf38012/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3001.964997] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 
5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3001.965263] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Copying Virtual Disk [datastore1] vmware_temp/3891ba52-e8e5-4634-8ef9-b08fedf38012/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/3891ba52-e8e5-4634-8ef9-b08fedf38012/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3001.965545] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d1b9c8e-7582-4609-bf72-e2e7a83935cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3001.972496] env[61663]: DEBUG oslo_vmware.api [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for the task: (returnval){ [ 3001.972496] env[61663]: value = "task-1690936" [ 3001.972496] env[61663]: _type = "Task" [ 3001.972496] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3001.980490] env[61663]: DEBUG oslo_vmware.api [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': task-1690936, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3002.282817] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096936} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3002.283058] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3002.283204] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3c8ee36e-c0bc-4d10-8452-83605a26facf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3002.283446] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/bbfa3bda-2bf1-4ea8-9919-6b323b658d4b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3002.283583] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/bbfa3bda-2bf1-4ea8-9919-6b323b658d4b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3002.283915] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/bbfa3bda-2bf1-4ea8-9919-6b323b658d4b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3002.284229] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12010c59-c116-4854-bb28-3a9436aa7564 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3002.288388] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3002.288388] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527b9b43-3cca-4934-af11-17095543e4ba" [ 3002.288388] env[61663]: _type = "Task" [ 3002.288388] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3002.296009] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527b9b43-3cca-4934-af11-17095543e4ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3002.483233] env[61663]: DEBUG oslo_vmware.exceptions [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3002.483528] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3002.484100] env[61663]: ERROR nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3002.484100] env[61663]: Faults: ['InvalidArgument'] [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Traceback (most recent call last): [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] yield resources [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] self.driver.spawn(context, instance, image_meta, [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] self._fetch_image_if_missing(context, vi) [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] image_cache(vi, tmp_image_ds_loc) [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] vm_util.copy_virtual_disk( [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] session._wait_for_task(vmdk_copy_task) [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] return self.wait_for_task(task_ref) [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] return evt.wait() [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] result = hub.switch() [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] return self.greenlet.switch() [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] self.f(*self.args, **self.kw) [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] raise exceptions.translate_fault(task_info.error) [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Faults: ['InvalidArgument'] [ 3002.484100] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] [ 3002.485145] env[61663]: INFO nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Terminating instance [ 3002.485974] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3002.487197] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3002.487866] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 
tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 3002.488082] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3002.488321] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99c1a36e-5f66-4998-a2e9-bf59ac2d5d5b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3002.490621] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde1f34d-9b65-460b-879f-653369a49de7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3002.497601] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3002.497733] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd04eaf2-3511-4dda-a51d-f81a5081ff1e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3002.499880] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3002.500069] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3002.501062] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad1826e3-fe57-4dfb-80f7-8ad5a15e0ff0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3002.505471] env[61663]: DEBUG oslo_vmware.api [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Waiting for the task: (returnval){ [ 3002.505471] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b9a945-286d-b64e-637d-39e32dceee22" [ 3002.505471] env[61663]: _type = "Task" [ 3002.505471] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3002.512445] env[61663]: DEBUG oslo_vmware.api [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b9a945-286d-b64e-637d-39e32dceee22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3002.564032] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3002.564217] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3002.564441] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Deleting the datastore file [datastore1] 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3002.564650] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-217ba48c-ce08-4c6b-9838-d2339156f0be {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3002.570728] env[61663]: DEBUG oslo_vmware.api [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for the task: (returnval){ [ 3002.570728] env[61663]: value = "task-1690938" [ 3002.570728] env[61663]: _type = "Task" [ 3002.570728] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3002.578086] env[61663]: DEBUG oslo_vmware.api [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': task-1690938, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3002.798831] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527b9b43-3cca-4934-af11-17095543e4ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008648} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3002.799165] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/bbfa3bda-2bf1-4ea8-9919-6b323b658d4b is no longer used. Deleting! 
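Annotation: the traceback above shows the spawn failure path — CopyVirtualDisk_Task faults with "InvalidArgument: fileType", which aborts _fetch_image_if_missing, and the compute manager then unregisters the half-built VM and deletes its datastore directory before surfacing the error. A compressed sketch of that cleanup flow; the exception class and all three callables are hypothetical stand-ins, not Nova's actual signatures:

class VimFault(Exception):
    """Local stand-in for oslo_vmware.exceptions.VimFaultException."""

def spawn(copy_virtual_disk, unregister_vm, delete_instance_files):
    try:
        copy_virtual_disk()       # faults: "A specified parameter was not correct: fileType"
    except VimFault:
        unregister_vm()           # "Unregistering the VM"
        delete_instance_files()   # "Deleting the datastore file [datastore1] ..."
        raise                     # reported as "Instance failed to spawn"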
[ 3002.799312] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/bbfa3bda-2bf1-4ea8-9919-6b323b658d4b {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3002.799567] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e439db63-fd16-4ae3-bf66-0c71b6d9b2b8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3002.805363] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3002.805363] env[61663]: value = "task-1690939" [ 3002.805363] env[61663]: _type = "Task" [ 3002.805363] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3002.812926] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690939, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3003.015068] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3003.015325] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Creating directory with path [datastore1] vmware_temp/a0f63704-ac9c-46bf-a203-ed955d967355/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3003.015586] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-864df3fb-e918-4485-9964-0c61b4d7b6cf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.025737] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Created directory with path [datastore1] vmware_temp/a0f63704-ac9c-46bf-a203-ed955d967355/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3003.025928] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Fetch image to [datastore1] vmware_temp/a0f63704-ac9c-46bf-a203-ed955d967355/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3003.026114] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] 
vmware_temp/a0f63704-ac9c-46bf-a203-ed955d967355/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3003.026810] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0de3e73-1d16-48e4-a4fa-799013bbe0c6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.033274] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fac500f-794e-437d-ad18-b9d24aefdc2f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.042040] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efd17c7-ad8a-48a2-a038-aeb8cc5699a1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.076042] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7f299e-52a2-4588-8f75-e5a095b9aa75 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.083066] env[61663]: DEBUG oslo_vmware.api [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Task: {'id': task-1690938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.061342} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3003.084501] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3003.084695] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3003.084868] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3003.085056] env[61663]: INFO nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Took 0.60 seconds to destroy the instance on the hypervisor. 
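[editor's note] The destroy that just completed ("Took 0.60 seconds") is two vSphere calls in sequence: VirtualMachine.UnregisterVM, then FileManager.DeleteDatastoreFile_Task on the instance directory. A sketch of that sequence using pyVmomi as an illustrative client; Nova's vmops/ds_util wrap the same two calls, and 'si', 'vm', and 'datacenter' are assumed to be already looked up.

    from pyVmomi import vim

    def destroy_instance(si, vm: vim.VirtualMachine,
                         datacenter: vim.Datacenter, ds_path: str):
        # 1. Unregister the VM from the inventory (no on-disk change yet).
        vm.UnregisterVM()
        # 2. Delete the instance directory, e.g. "[datastore1] 5bc71c5f-...".
        fm = si.content.fileManager
        task = fm.DeleteDatastoreFile_Task(name=ds_path, datacenter=datacenter)
        wait_for_task(task)  # e.g. the poll helper sketched earlier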
[ 3003.086813] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-75343627-773c-43ad-8d1c-68b69342da1b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.088660] env[61663]: DEBUG nova.compute.claims [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3003.088828] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3003.089049] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3003.112626] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3003.165019] env[61663]: DEBUG oslo_vmware.rw_handles [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a0f63704-ac9c-46bf-a203-ed955d967355/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3003.222227] env[61663]: DEBUG nova.scheduler.client.report [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Refreshing inventories for resource provider b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 3003.226220] env[61663]: DEBUG oslo_vmware.rw_handles [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Completed reading data from the image iterator. 
{{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3003.226395] env[61663]: DEBUG oslo_vmware.rw_handles [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a0f63704-ac9c-46bf-a203-ed955d967355/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3003.237937] env[61663]: DEBUG nova.scheduler.client.report [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Updating ProviderTree inventory for provider b47d006d-a9bd-461e-a5d9-39811f005278 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 3003.238163] env[61663]: DEBUG nova.compute.provider_tree [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Updating inventory in ProviderTree for provider b47d006d-a9bd-461e-a5d9-39811f005278 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3003.250515] env[61663]: DEBUG nova.scheduler.client.report [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Refreshing aggregate associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, aggregates: None {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 3003.267319] env[61663]: DEBUG nova.scheduler.client.report [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Refreshing trait associations for resource provider b47d006d-a9bd-461e-a5d9-39811f005278, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61663) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 3003.315182] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690939, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103114} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3003.317324] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3003.317533] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/bbfa3bda-2bf1-4ea8-9919-6b323b658d4b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3003.317766] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/40c1579b-5ba2-4ab0-94bf-b33409730f3f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3003.317891] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/40c1579b-5ba2-4ab0-94bf-b33409730f3f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3003.318235] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/40c1579b-5ba2-4ab0-94bf-b33409730f3f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3003.318801] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acd7d4f0-4b42-4f28-8e6d-a33be6264774 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.322976] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3003.322976] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f01303-f4c1-f9a8-887d-df45f4402f17" [ 3003.322976] env[61663]: _type = "Task" [ 3003.322976] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3003.332195] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f01303-f4c1-f9a8-887d-df45f4402f17, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3003.387165] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16ca557-eee8-47a6-bb4d-f31c3b353f35 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.393788] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7adc62-217e-4087-a72b-05f24ac3bf01 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.422660] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0255747-5b6e-4ae4-9ddc-cb75087a6522 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.429164] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb94866-b0ff-4f01-9826-5caab152fc6e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.441465] env[61663]: DEBUG nova.compute.provider_tree [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3003.450876] env[61663]: DEBUG nova.scheduler.client.report [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3003.465755] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.377s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3003.466278] env[61663]: ERROR nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3003.466278] env[61663]: Faults: ['InvalidArgument'] [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Traceback (most recent call last): [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance 
[ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] self.driver.spawn(context, instance, image_meta, [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] self._fetch_image_if_missing(context, vi) [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] image_cache(vi, tmp_image_ds_loc) [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] vm_util.copy_virtual_disk( [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] session._wait_for_task(vmdk_copy_task) [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] return self.wait_for_task(task_ref) [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] return evt.wait() [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] result = hub.switch() [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] return self.greenlet.switch() [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] self.f(*self.args, **self.kw) [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 
5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] raise exceptions.translate_fault(task_info.error) [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Faults: ['InvalidArgument'] [ 3003.466278] env[61663]: ERROR nova.compute.manager [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] [ 3003.467076] env[61663]: DEBUG nova.compute.utils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 3003.468402] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Build of instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e was re-scheduled: A specified parameter was not correct: fileType [ 3003.468402] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 3003.468783] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 3003.468959] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 3003.469146] env[61663]: DEBUG nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 3003.469314] env[61663]: DEBUG nova.network.neutron [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3003.780564] env[61663]: DEBUG nova.network.neutron [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3003.794954] env[61663]: INFO nova.compute.manager [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Took 0.33 seconds to deallocate network for instance. [ 3003.834081] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f01303-f4c1-f9a8-887d-df45f4402f17, 'name': SearchDatastore_Task, 'duration_secs': 0.008453} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3003.838018] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/40c1579b-5ba2-4ab0-94bf-b33409730f3f/ts-2024-12-01-04-38-49 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3003.838152] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-835f9d3b-7855-4645-a205-ec0051cbc1ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.849972] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/40c1579b-5ba2-4ab0-94bf-b33409730f3f/ts-2024-12-01-04-38-49 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3003.850142] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 40c1579b-5ba2-4ab0-94bf-b33409730f3f is no longer used by this node. Pending deletion! 
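[editor's note] The traceback above ends in oslo_vmware.exceptions.VimFaultException with Faults: ['InvalidArgument'], after which the compute manager aborts the resource claim and re-schedules the build instead of retrying the disk copy. VimFaultException really does carry the vSphere fault names in its fault_list attribute; the sketch below shows a caller branching on it, with spawn_disk_copy and retry_elsewhere as hypothetical stand-ins for the manager's reschedule path.

    from oslo_vmware import exceptions as vexc

    def copy_with_reschedule(session, copy_task_args):
        try:
            spawn_disk_copy(session, copy_task_args)   # hypothetical spawn step
        except vexc.VimFaultException as e:
            if 'InvalidArgument' in e.fault_list:
                # Matches the log: give up on this host, abort the claim,
                # and let the scheduler place the build elsewhere.
                retry_elsewhere(copy_task_args)        # hypothetical
            else:
                raise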
[ 3003.850308] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/40c1579b-5ba2-4ab0-94bf-b33409730f3f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3003.850528] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/54ac7dea-0252-4ab3-bca0-401563ab58c3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3003.850651] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/54ac7dea-0252-4ab3-bca0-401563ab58c3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3003.850979] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/54ac7dea-0252-4ab3-bca0-401563ab58c3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3003.851250] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6a794f3-c3cf-460c-a396-c9f3790773e4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.856378] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3003.856378] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5209aa15-3603-cc72-5fdb-8ebd01bca2dd" [ 3003.856378] env[61663]: _type = "Task" [ 3003.856378] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3003.864100] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5209aa15-3603-cc72-5fdb-8ebd01bca2dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3003.893035] env[61663]: INFO nova.scheduler.client.report [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Deleted allocations for instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e [ 3003.917983] env[61663]: DEBUG oslo_concurrency.lockutils [None req-f0d48429-3ff3-4392-87d9-4f0606bf7036 tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 678.024s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3003.918257] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 482.190s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3003.918495] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Acquiring lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3003.918703] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3003.918873] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3003.921360] env[61663]: INFO nova.compute.manager [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Terminating instance [ 3003.923135] env[61663]: DEBUG nova.compute.manager [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 3003.924099] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3003.924099] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34e6c5db-d660-4c8d-98da-8929416f6bbe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.932904] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9bbe6b-e21d-427e-b237-42b1c27ac7fe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.960960] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e could not be found. [ 3003.961182] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3003.961369] env[61663]: INFO nova.compute.manager [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 3003.961631] env[61663]: DEBUG oslo.service.loopingcall [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3003.961856] env[61663]: DEBUG nova.compute.manager [-] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 3003.961956] env[61663]: DEBUG nova.network.neutron [-] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3004.004545] env[61663]: DEBUG nova.network.neutron [-] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3004.013815] env[61663]: INFO nova.compute.manager [-] [instance: 5bc71c5f-20d1-47bf-ac1e-9b32c32b198e] Took 0.05 seconds to deallocate network for instance. 
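[editor's note] The Acquiring/acquired/released breadcrumbs with "waited"/"held" timings come from oslo.concurrency's lock helpers. A minimal sketch of the two forms seen here, with lock names mirroring the log ("compute_resources" and per-instance UUID locks):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        ...  # runs with the same mutual exclusion the resource tracker uses

    def do_terminate_instance(instance_uuid):
        with lockutils.lock(instance_uuid):
            ...  # serialized against other operations on this instance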
[ 3004.120990] env[61663]: DEBUG oslo_concurrency.lockutils [None req-a081b1bf-e9f7-4748-974c-ffadcd8bef6a tempest-AttachVolumeShelveTestJSON-1596508632 tempest-AttachVolumeShelveTestJSON-1596508632-project-member] Lock "5bc71c5f-20d1-47bf-ac1e-9b32c32b198e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.203s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3004.366821] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5209aa15-3603-cc72-5fdb-8ebd01bca2dd, 'name': SearchDatastore_Task, 'duration_secs': 0.00795} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3004.367073] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/54ac7dea-0252-4ab3-bca0-401563ab58c3/ts-2024-12-01-04-38-50 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3004.367343] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90eb2410-96bd-4fc4-acef-e25a9dd0d9f4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3004.378384] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/54ac7dea-0252-4ab3-bca0-401563ab58c3/ts-2024-12-01-04-38-50 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3004.378502] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 54ac7dea-0252-4ab3-bca0-401563ab58c3 is no longer used by this node. Pending deletion! 
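[editor's note] The repeating "Pending deletion!" stanzas that follow are the image-cache aging pass: each unused cached image is marked by creating a ts-<timestamp> directory beside it, and a later periodic run removes images whose marker has outlived a configured age (Nova's knob for this is remove_unused_original_minimum_age_seconds). A rough sketch of the scheme; the helpers are illustrative, not Nova's API.

    from datetime import datetime, timezone

    TS_PREFIX = 'ts-'

    def mark_unused(mkdir, image_id):
        # Drop a marker dir like "<image_id>/ts-2024-12-01-04-38-50".
        stamp = datetime.now(timezone.utc).strftime('%Y-%m-%d-%H-%M-%S')
        mkdir('%s/%s%s' % (image_id, TS_PREFIX, stamp))

    def should_delete(marker_age_seconds, minimum_age_seconds=86400):
        # Only reap images that have stayed unused for the full grace period.
        return marker_age_seconds >= minimum_age_seconds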
[ 3004.378675] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/54ac7dea-0252-4ab3-bca0-401563ab58c3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3004.378912] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3c3bf6fb-95b0-4b3e-9601-9c0b57a23077" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3004.379046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3c3bf6fb-95b0-4b3e-9601-9c0b57a23077" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3004.379367] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3c3bf6fb-95b0-4b3e-9601-9c0b57a23077" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3004.379592] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f697ec78-8213-41d8-b285-9fb2267a72a9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3004.384906] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3004.384906] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cbd0e5-e39e-3a8c-c13f-6dda1a12ba5c" [ 3004.384906] env[61663]: _type = "Task" [ 3004.384906] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3004.392009] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cbd0e5-e39e-3a8c-c13f-6dda1a12ba5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3004.899102] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cbd0e5-e39e-3a8c-c13f-6dda1a12ba5c, 'name': SearchDatastore_Task, 'duration_secs': 0.007695} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3004.899487] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3c3bf6fb-95b0-4b3e-9601-9c0b57a23077/ts-2024-12-01-04-38-50 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3004.899833] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab088d35-8f5f-4ad4-9fe6-117f38d699dd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3004.911867] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3c3bf6fb-95b0-4b3e-9601-9c0b57a23077/ts-2024-12-01-04-38-50 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3004.912085] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3c3bf6fb-95b0-4b3e-9601-9c0b57a23077 is no longer used by this node. Pending deletion! [ 3004.912318] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3c3bf6fb-95b0-4b3e-9601-9c0b57a23077" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3004.912635] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/0eea9097-ac15-4d3c-860f-8f565557e581" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3004.912818] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/0eea9097-ac15-4d3c-860f-8f565557e581" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3004.913267] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0eea9097-ac15-4d3c-860f-8f565557e581" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3004.913577] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-795d6559-c90d-43ea-9a90-3263be4871e8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3004.918491] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3004.918491] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522c5258-9d09-1d5f-a474-596c67ec008a" [ 3004.918491] env[61663]: _type = "Task" [ 3004.918491] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3004.925664] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522c5258-9d09-1d5f-a474-596c67ec008a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3005.428535] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522c5258-9d09-1d5f-a474-596c67ec008a, 'name': SearchDatastore_Task, 'duration_secs': 0.007261} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3005.428835] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/0eea9097-ac15-4d3c-860f-8f565557e581/ts-2024-12-01-04-38-51 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3005.429110] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4709ac5f-e70f-4b9f-80db-eacefa94071c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3005.439798] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/0eea9097-ac15-4d3c-860f-8f565557e581/ts-2024-12-01-04-38-51 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3005.439943] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 0eea9097-ac15-4d3c-860f-8f565557e581 is no longer used by this node. Pending deletion! [ 3005.440130] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/0eea9097-ac15-4d3c-860f-8f565557e581" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3005.440343] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7b45bdc3-1cbe-4f06-aaa5-0e067ebe68df" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3005.440464] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7b45bdc3-1cbe-4f06-aaa5-0e067ebe68df" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3005.440783] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7b45bdc3-1cbe-4f06-aaa5-0e067ebe68df" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3005.441012] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4c9e9b5-bcf3-43c1-9c92-f44a04388391 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3005.444784] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3005.444784] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520c0891-e376-7aea-b6b5-6c4e61e38101" [ 3005.444784] env[61663]: _type = "Task" [ 3005.444784] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3005.451764] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520c0891-e376-7aea-b6b5-6c4e61e38101, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3005.955637] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520c0891-e376-7aea-b6b5-6c4e61e38101, 'name': SearchDatastore_Task, 'duration_secs': 0.007207} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3005.955985] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/7b45bdc3-1cbe-4f06-aaa5-0e067ebe68df/ts-2024-12-01-04-38-51 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3005.956166] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee72556c-eb28-4a36-befc-1317d8dbd8a1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3005.967229] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/7b45bdc3-1cbe-4f06-aaa5-0e067ebe68df/ts-2024-12-01-04-38-51 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3005.967463] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 7b45bdc3-1cbe-4f06-aaa5-0e067ebe68df is no longer used by this node. Pending deletion! 
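[editor's note] Each cache check in this stretch is one HostDatastoreBrowser.SearchDatastore_Task against the devstack-image-cache_base folder. A sketch of that call, again via pyVmomi as an illustrative client:

    from pyVmomi import vim

    def image_in_cache(datastore: vim.Datastore, image_id: str) -> bool:
        spec = vim.host.DatastoreBrowser.SearchSpec(matchPattern=[image_id])
        path = '[%s] devstack-image-cache_base' % datastore.name
        task = datastore.browser.SearchDatastore_Task(datastorePath=path,
                                                      searchSpec=spec)
        result = wait_for_task(task)  # poll helper sketched earlier
        return bool(result.file)      # SearchResults.file lists any matches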
[ 3005.967593] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7b45bdc3-1cbe-4f06-aaa5-0e067ebe68df" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3005.967744] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6826a1f2-be5e-4daf-ba8c-33463580325c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3005.967865] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6826a1f2-be5e-4daf-ba8c-33463580325c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3005.968196] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6826a1f2-be5e-4daf-ba8c-33463580325c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3005.968437] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c45081d-78c9-44fa-8de1-d89a43b148b7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3005.972560] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3005.972560] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a5eca4-650b-345c-8793-82fe0b6b989e" [ 3005.972560] env[61663]: _type = "Task" [ 3005.972560] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3005.979787] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a5eca4-650b-345c-8793-82fe0b6b989e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3006.483784] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a5eca4-650b-345c-8793-82fe0b6b989e, 'name': SearchDatastore_Task, 'duration_secs': 0.007463} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3006.484078] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/6826a1f2-be5e-4daf-ba8c-33463580325c/ts-2024-12-01-04-38-52 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3006.484353] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8fde531-921b-4bbf-9a7b-3ee49d2ffadf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.495587] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/6826a1f2-be5e-4daf-ba8c-33463580325c/ts-2024-12-01-04-38-52 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3006.495733] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 6826a1f2-be5e-4daf-ba8c-33463580325c is no longer used by this node. Pending deletion! [ 3006.495894] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6826a1f2-be5e-4daf-ba8c-33463580325c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3006.496157] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/a9e6b515-4e06-4fcf-a8be-93594c1ab280" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3006.496238] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/a9e6b515-4e06-4fcf-a8be-93594c1ab280" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3006.496542] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a9e6b515-4e06-4fcf-a8be-93594c1ab280" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3006.496780] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9366e75f-be45-44af-a2f9-d871e0455187 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3006.500835] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3006.500835] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523cb3ab-2f35-9b5c-2a79-7e47b051fffe" [ 3006.500835] env[61663]: _type = "Task" [ 3006.500835] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3006.508075] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523cb3ab-2f35-9b5c-2a79-7e47b051fffe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3007.011514] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523cb3ab-2f35-9b5c-2a79-7e47b051fffe, 'name': SearchDatastore_Task, 'duration_secs': 0.007436} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3007.011890] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/a9e6b515-4e06-4fcf-a8be-93594c1ab280/ts-2024-12-01-04-38-52 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3007.012042] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6da02f5-1221-415a-9956-2fdc6e31d36c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3007.023159] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/a9e6b515-4e06-4fcf-a8be-93594c1ab280/ts-2024-12-01-04-38-52 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3007.023309] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image a9e6b515-4e06-4fcf-a8be-93594c1ab280 is no longer used by this node. Pending deletion! [ 3007.023457] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/a9e6b515-4e06-4fcf-a8be-93594c1ab280" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3007.023666] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/ba787b2a-d4f0-4994-a799-9c3cd5a97692" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3007.023786] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/ba787b2a-d4f0-4994-a799-9c3cd5a97692" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3007.024115] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/ba787b2a-d4f0-4994-a799-9c3cd5a97692" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3007.024345] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d94bb084-7672-4648-8f82-2a3627382d95 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3007.028247] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3007.028247] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524b12e8-ed93-3f2c-bcb9-010dc17c7d94" [ 3007.028247] env[61663]: _type = "Task" [ 3007.028247] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3007.035031] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524b12e8-ed93-3f2c-bcb9-010dc17c7d94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3007.538734] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524b12e8-ed93-3f2c-bcb9-010dc17c7d94, 'name': SearchDatastore_Task, 'duration_secs': 0.007254} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3007.539058] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/ba787b2a-d4f0-4994-a799-9c3cd5a97692/ts-2024-12-01-04-38-53 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3007.539483] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b645e259-ed75-4351-81fc-f9a944221476 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3007.550811] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/ba787b2a-d4f0-4994-a799-9c3cd5a97692/ts-2024-12-01-04-38-53 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3007.550952] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image ba787b2a-d4f0-4994-a799-9c3cd5a97692 is no longer used by this node. Pending deletion! 
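The cycle repeating above is Nova's VMware image-cache aging pass: for each cached image folder that no instance on this node references, the manager searches the folder, drops a marker directory named ts-<timestamp> inside it, and logs "Pending deletion!". A later pass deletes folders whose marker is older than the configured age threshold (remove_unused_original_minimum_age_seconds in nova's image cache configuration). A minimal sketch of that marker protocol follows; mark_unused, old_enough, and the injected mkdir callable are illustrative stand-ins, not the real nova.virt.vmwareapi.imagecache API:

```python
from datetime import datetime, timedelta

TS_PREFIX = "ts-"
TS_FORMAT = "%Y-%m-%d-%H-%M-%S"   # matches ts-2024-12-01-04-38-52 above

def mark_unused(image_id, now, mkdir):
    """Drop the timestamp marker announced as 'Pending deletion!'."""
    marker = TS_PREFIX + now.strftime(TS_FORMAT)
    mkdir("[datastore2] devstack-image-cache_base/%s/%s" % (image_id, marker))

def old_enough(marker_name, now, max_age_seconds):
    """On a later sweep, decide whether the marked image may be deleted."""
    written = datetime.strptime(marker_name[len(TS_PREFIX):], TS_FORMAT)
    return (now - written) > timedelta(seconds=max_age_seconds)
```

Keeping the marker on the datastore, next to the image itself, presumably lets the aging decision survive service restarts rather than living only in process memory.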
[ 3007.551132] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/ba787b2a-d4f0-4994-a799-9c3cd5a97692" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3007.551346] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/385f6770-bea0-43e8-9cac-73487b01bd08" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3007.551466] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/385f6770-bea0-43e8-9cac-73487b01bd08" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3007.551773] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/385f6770-bea0-43e8-9cac-73487b01bd08" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3007.552024] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f98ca6c9-c653-44da-9c9b-a525ec46c1ff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3007.563019] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3007.563019] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b78cd4-fe18-b4a5-2b52-03019b8d378e" [ 3007.563019] env[61663]: _type = "Task" [ 3007.563019] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3007.565094] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b78cd4-fe18-b4a5-2b52-03019b8d378e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3008.067321] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b78cd4-fe18-b4a5-2b52-03019b8d378e, 'name': SearchDatastore_Task, 'duration_secs': 0.007618} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3008.067794] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/385f6770-bea0-43e8-9cac-73487b01bd08/ts-2024-12-01-04-38-53 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3008.067873] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15485a2e-d570-4dc4-92ae-2bae2bfaf819 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3008.078818] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/385f6770-bea0-43e8-9cac-73487b01bd08/ts-2024-12-01-04-38-53 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3008.079034] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 385f6770-bea0-43e8-9cac-73487b01bd08 is no longer used by this node. Pending deletion! [ 3008.079130] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/385f6770-bea0-43e8-9cac-73487b01bd08" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3008.079348] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/910eefcb-83b0-4640-bab0-715c6126b8dc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3008.079466] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/910eefcb-83b0-4640-bab0-715c6126b8dc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3008.079778] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/910eefcb-83b0-4640-bab0-715c6126b8dc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3008.080073] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fe65c00-04be-4a2c-be2b-31b7a093b9d7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3008.084223] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3008.084223] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f15be-eae7-4e3c-d658-379da4006ba4" [ 3008.084223] env[61663]: _type = "Task" [ 3008.084223] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3008.091133] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f15be-eae7-4e3c-d658-379da4006ba4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3008.594356] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f15be-eae7-4e3c-d658-379da4006ba4, 'name': SearchDatastore_Task, 'duration_secs': 0.007257} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3008.594639] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/910eefcb-83b0-4640-bab0-715c6126b8dc/ts-2024-12-01-04-38-54 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3008.594902] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-084f9395-0d9e-40bc-a5eb-ae8dae146e4a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3008.657901] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/910eefcb-83b0-4640-bab0-715c6126b8dc/ts-2024-12-01-04-38-54 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3008.658089] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 910eefcb-83b0-4640-bab0-715c6126b8dc is no longer used by this node. Pending deletion! [ 3008.658181] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/910eefcb-83b0-4640-bab0-715c6126b8dc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3008.658478] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/ee22b456-5427-4428-af0b-da0633648b40" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3008.658623] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/ee22b456-5427-4428-af0b-da0633648b40" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3008.658939] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/ee22b456-5427-4428-af0b-da0633648b40" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3008.659249] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d062ad9a-69cf-45e9-af18-f0e9e9630d46 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3008.664123] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3008.664123] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5281371f-a4cd-3e28-67cc-e0de8e4c27b7" [ 3008.664123] env[61663]: _type = "Task" [ 3008.664123] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3008.672199] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5281371f-a4cd-3e28-67cc-e0de8e4c27b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3009.175362] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5281371f-a4cd-3e28-67cc-e0de8e4c27b7, 'name': SearchDatastore_Task, 'duration_secs': 0.010498} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3009.175786] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/ee22b456-5427-4428-af0b-da0633648b40/ts-2024-12-01-04-38-55 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3009.175881] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb57ec7b-2c13-4ffc-a248-a5609f4193e6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3009.187282] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/ee22b456-5427-4428-af0b-da0633648b40/ts-2024-12-01-04-38-55 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3009.187454] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image ee22b456-5427-4428-af0b-da0633648b40 is no longer used by this node. Pending deletion! 
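Each "Waiting for the task" / "progress is 0%" / "completed successfully" triple is oslo_vmware's task polling; the real loop lives in oslo_vmware.api (the wait_for_task and _poll_task locations in the records above) and additionally handles scheduling and VMware fault translation. A simplified sketch of the pattern, assuming get_task_info returns a TaskInfo-like object with state, result, and error attributes:

```python
import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter TaskInfo until it reaches a terminal state."""
    while True:
        info = get_task_info()        # TaskInfo-like: .state, .result, .error
        if info.state == "success":   # logged as 'completed successfully'
            return info
        if info.state == "error":
            raise RuntimeError(info.error)
        time.sleep(poll_interval)     # the ~0.5 s gaps between records above
```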
[ 3009.187578] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/ee22b456-5427-4428-af0b-da0633648b40" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3009.187812] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8b8bcca5-ef29-4f01-8178-76bf01cf3baa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3009.187932] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8b8bcca5-ef29-4f01-8178-76bf01cf3baa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3009.188254] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8b8bcca5-ef29-4f01-8178-76bf01cf3baa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3009.188478] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-227405a7-e8e9-411f-94ab-2a0040c10250 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3009.192435] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3009.192435] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5253299c-038a-eac4-d273-ab59861dd943" [ 3009.192435] env[61663]: _type = "Task" [ 3009.192435] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3009.199382] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5253299c-038a-eac4-d273-ab59861dd943, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3009.703680] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5253299c-038a-eac4-d273-ab59861dd943, 'name': SearchDatastore_Task, 'duration_secs': 0.007603} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3009.703957] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8b8bcca5-ef29-4f01-8178-76bf01cf3baa/ts-2024-12-01-04-38-55 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3009.704251] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c844aa59-5025-4563-ad50-e1118c4a3cff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3009.716240] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8b8bcca5-ef29-4f01-8178-76bf01cf3baa/ts-2024-12-01-04-38-55 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3009.716403] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8b8bcca5-ef29-4f01-8178-76bf01cf3baa is no longer used by this node. Pending deletion! [ 3009.716547] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8b8bcca5-ef29-4f01-8178-76bf01cf3baa" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3009.716765] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3009.716886] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3009.717243] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3009.717507] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a376497-9f57-4863-88f6-9e7fbf90cb5a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3009.721744] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3009.721744] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f69081-24d3-3ba9-e577-a4f0c2e4975e" [ 3009.721744] env[61663]: _type = "Task" [ 3009.721744] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3009.728891] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f69081-24d3-3ba9-e577-a4f0c2e4975e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3010.231987] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f69081-24d3-3ba9-e577-a4f0c2e4975e, 'name': SearchDatastore_Task, 'duration_secs': 0.007613} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3010.232353] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460/ts-2024-12-01-04-38-56 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3010.232482] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9027dcfd-e34e-4ed5-9ec4-fd648ed8f441 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3010.243990] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460/ts-2024-12-01-04-38-56 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3010.244191] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 78b0977c-33d1-40c7-836f-08f17ea59460 is no longer used by this node. Pending deletion! [ 3010.244309] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3010.244522] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/dbe98cf8-30c7-4c58-9b20-e13338cfebc8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3010.244672] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/dbe98cf8-30c7-4c58-9b20-e13338cfebc8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3010.244958] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/dbe98cf8-30c7-4c58-9b20-e13338cfebc8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3010.245206] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f38bcef5-d90b-4583-961b-edd0f4ac47e1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3010.249569] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3010.249569] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5210985c-76ca-0906-e68e-bf6a673baf1d" [ 3010.249569] env[61663]: _type = "Task" [ 3010.249569] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3010.256656] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5210985c-76ca-0906-e68e-bf6a673baf1d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3010.760192] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5210985c-76ca-0906-e68e-bf6a673baf1d, 'name': SearchDatastore_Task, 'duration_secs': 0.008964} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3010.760481] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/dbe98cf8-30c7-4c58-9b20-e13338cfebc8/ts-2024-12-01-04-38-56 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3010.760745] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-588f295b-1e25-4069-ac31-7bcc41f53259 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3010.771969] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/dbe98cf8-30c7-4c58-9b20-e13338cfebc8/ts-2024-12-01-04-38-56 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3010.772097] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image dbe98cf8-30c7-4c58-9b20-e13338cfebc8 is no longer used by this node. Pending deletion! 
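The Acquiring lock / Acquired lock / Releasing lock triples come from oslo_concurrency.lockutils, with the datastore folder path used directly as the lock name; the "Acquired external semaphore" record is an internal detail of the same machinery. Serializing on the folder presumably keeps a concurrent spawn that wants to reuse the cached image from racing the aging pass. The equivalent pattern with the real lockutils.lock context manager, where do_age stands in for the per-image work:

```python
from oslo_concurrency import lockutils

def age_one_image(image_ds_path, do_age):
    # Lock name is the folder itself, e.g.
    # "[datastore2] devstack-image-cache_base/<image-id>"
    with lockutils.lock(image_ds_path):
        do_age(image_ds_path)
```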
[ 3010.772246] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/dbe98cf8-30c7-4c58-9b20-e13338cfebc8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3010.772469] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/c5ac5b34-1066-4ce2-8907-7e7c06ef9fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3010.772586] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/c5ac5b34-1066-4ce2-8907-7e7c06ef9fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3010.772911] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c5ac5b34-1066-4ce2-8907-7e7c06ef9fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3010.773178] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e16dcb3-caf6-4b7c-bd50-37eff4d0c651 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3010.777271] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3010.777271] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5221389f-5bd8-a89c-96ce-cca64cc16492" [ 3010.777271] env[61663]: _type = "Task" [ 3010.777271] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3010.784431] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5221389f-5bd8-a89c-96ce-cca64cc16492, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3011.288157] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5221389f-5bd8-a89c-96ce-cca64cc16492, 'name': SearchDatastore_Task, 'duration_secs': 0.007622} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3011.288446] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/c5ac5b34-1066-4ce2-8907-7e7c06ef9fca/ts-2024-12-01-04-38-57 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3011.288708] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3411abb4-f229-45ed-8134-05d6f3a68d1d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3011.300029] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/c5ac5b34-1066-4ce2-8907-7e7c06ef9fca/ts-2024-12-01-04-38-57 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3011.300175] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image c5ac5b34-1066-4ce2-8907-7e7c06ef9fca is no longer used by this node. Pending deletion! [ 3011.300336] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/c5ac5b34-1066-4ce2-8907-7e7c06ef9fca" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3011.300618] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/1774658d-ac50-4386-b0b7-c15a48aa1b6b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3011.300755] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/1774658d-ac50-4386-b0b7-c15a48aa1b6b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3011.301134] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1774658d-ac50-4386-b0b7-c15a48aa1b6b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3011.301370] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21d37f1f-4d4c-4469-980a-0a2a372188aa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3011.305457] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3011.305457] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524c811a-cef3-a08d-e392-38e0851abd93" [ 3011.305457] env[61663]: _type = "Task" [ 3011.305457] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3011.313756] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524c811a-cef3-a08d-e392-38e0851abd93, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3011.815932] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524c811a-cef3-a08d-e392-38e0851abd93, 'name': SearchDatastore_Task, 'duration_secs': 0.007841} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3011.816169] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/1774658d-ac50-4386-b0b7-c15a48aa1b6b/ts-2024-12-01-04-38-57 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3011.816434] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee6e4e51-b38f-4a8f-b2e2-a580e5b3998e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3011.827895] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/1774658d-ac50-4386-b0b7-c15a48aa1b6b/ts-2024-12-01-04-38-57 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3011.828070] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 1774658d-ac50-4386-b0b7-c15a48aa1b6b is no longer used by this node. Pending deletion! [ 3011.828215] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/1774658d-ac50-4386-b0b7-c15a48aa1b6b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3011.828430] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2a9ca5d3-3b45-461f-a3e4-b175afe45546" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3011.828549] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2a9ca5d3-3b45-461f-a3e4-b175afe45546" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3011.828920] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2a9ca5d3-3b45-461f-a3e4-b175afe45546" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3011.829171] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63a34277-738a-4394-b4a5-a2e4648cc7e8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3011.833100] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3011.833100] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5284d475-5f07-e594-abdf-242d05b9af3d" [ 3011.833100] env[61663]: _type = "Task" [ 3011.833100] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3011.840228] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5284d475-5f07-e594-abdf-242d05b9af3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3012.342935] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5284d475-5f07-e594-abdf-242d05b9af3d, 'name': SearchDatastore_Task, 'duration_secs': 0.007948} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3012.343251] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/2a9ca5d3-3b45-461f-a3e4-b175afe45546/ts-2024-12-01-04-38-58 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3012.343422] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57171812-6435-4c64-bcb2-921b529a45ff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3012.354747] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/2a9ca5d3-3b45-461f-a3e4-b175afe45546/ts-2024-12-01-04-38-58 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3012.354901] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 2a9ca5d3-3b45-461f-a3e4-b175afe45546 is no longer used by this node. Pending deletion! 
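The SearchDatastore_Task invocations go through oslo_vmware's generic invoke_api call on the vim service, followed by the task wait shown earlier. A hedged sketch of that pairing; obtaining the HostDatastoreBrowser moref (ds_browser) and building search_spec are elided here, so treat both names as assumptions:

```python
def check_image_folder(session, ds_browser, folder_path, search_spec):
    task = session.invoke_api(
        session.vim, 'SearchDatastore_Task', ds_browser,
        datastorePath=folder_path,    # "[datastore2] devstack-image-cache_base/<id>"
        searchSpec=search_spec)
    task_info = session.wait_for_task(task)   # the poll loop sketched earlier
    return task_info.result                   # HostDatastoreBrowserSearchResults
```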
[ 3012.355079] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2a9ca5d3-3b45-461f-a3e4-b175afe45546" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3012.355298] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d47c0f10-5211-4f6f-bf33-cbebd085c9d7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3012.355418] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d47c0f10-5211-4f6f-bf33-cbebd085c9d7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3012.355732] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d47c0f10-5211-4f6f-bf33-cbebd085c9d7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3012.355991] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af9f4f57-3086-4463-bf3d-070666cdb5c7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3012.360153] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3012.360153] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5213708e-e3f4-06ed-2485-708efeb86658" [ 3012.360153] env[61663]: _type = "Task" [ 3012.360153] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3012.367583] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5213708e-e3f4-06ed-2485-708efeb86658, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3012.870879] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5213708e-e3f4-06ed-2485-708efeb86658, 'name': SearchDatastore_Task, 'duration_secs': 0.007392} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3012.871159] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d47c0f10-5211-4f6f-bf33-cbebd085c9d7/ts-2024-12-01-04-38-58 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3012.871425] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-324b0376-466a-4020-bc46-4bf7b4899050 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3012.882800] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d47c0f10-5211-4f6f-bf33-cbebd085c9d7/ts-2024-12-01-04-38-58 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3012.882960] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d47c0f10-5211-4f6f-bf33-cbebd085c9d7 is no longer used by this node. Pending deletion! [ 3012.883119] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d47c0f10-5211-4f6f-bf33-cbebd085c9d7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3012.883337] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/c1e52a57-5693-4c12-ab6b-f5188b2be7b6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3012.883454] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/c1e52a57-5693-4c12-ab6b-f5188b2be7b6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3012.883770] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c1e52a57-5693-4c12-ab6b-f5188b2be7b6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3012.884051] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbd8cbd0-7f29-482d-8463-c6e8c203793a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3012.888143] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3012.888143] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52155fb5-0014-b221-b701-bbedc29a3025" [ 3012.888143] env[61663]: _type = "Task" [ 3012.888143] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3012.895191] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52155fb5-0014-b221-b701-bbedc29a3025, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3013.397775] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52155fb5-0014-b221-b701-bbedc29a3025, 'name': SearchDatastore_Task, 'duration_secs': 0.007495} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3013.398082] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/c1e52a57-5693-4c12-ab6b-f5188b2be7b6/ts-2024-12-01-04-38-59 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3013.398336] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f5ba578-567e-4ae1-9de1-eec116de0ac1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3013.409719] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/c1e52a57-5693-4c12-ab6b-f5188b2be7b6/ts-2024-12-01-04-38-59 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3013.409875] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image c1e52a57-5693-4c12-ab6b-f5188b2be7b6 is no longer used by this node. Pending deletion! [ 3013.410037] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/c1e52a57-5693-4c12-ab6b-f5188b2be7b6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3013.410268] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/a5d4bfb7-a624-40e8-b795-7d4cbb201189" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3013.410386] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/a5d4bfb7-a624-40e8-b795-7d4cbb201189" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3013.410690] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a5d4bfb7-a624-40e8-b795-7d4cbb201189" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3013.410947] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23efb0f5-af93-4d20-a9e1-366ed6d28a0c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3013.416155] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3013.416155] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52fb7092-5549-58fa-bdc0-6a895055cef9" [ 3013.416155] env[61663]: _type = "Task" [ 3013.416155] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3013.422251] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52fb7092-5549-58fa-bdc0-6a895055cef9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3013.925800] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52fb7092-5549-58fa-bdc0-6a895055cef9, 'name': SearchDatastore_Task, 'duration_secs': 0.007483} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3013.926081] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/a5d4bfb7-a624-40e8-b795-7d4cbb201189/ts-2024-12-01-04-38-59 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3013.926339] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a75d1166-9fc1-4c9a-a427-3d8ad99a0ee0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3013.937712] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/a5d4bfb7-a624-40e8-b795-7d4cbb201189/ts-2024-12-01-04-38-59 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3013.937865] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image a5d4bfb7-a624-40e8-b795-7d4cbb201189 is no longer used by this node. Pending deletion! 
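Unlike SearchDatastore_Task, FileManager.MakeDirectory is a synchronous vSphere call, which is why each "Creating directory" record completes within tens of milliseconds and no polling appears in between. A sketch of the call in the shape nova's ds_util.mkdir issues it, with the session object and the datacenter moref dc_ref assumed to exist:

```python
def make_ts_dir(session, dc_ref, marker_path):
    file_manager = session.vim.service_content.fileManager
    session.invoke_api(
        session.vim, 'MakeDirectory', file_manager,
        name=marker_path,                # ".../ts-2024-12-01-04-39-00"
        datacenter=dc_ref,
        createParentDirectories=True)    # parents already exist here, but safe
```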
[ 3013.938031] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/a5d4bfb7-a624-40e8-b795-7d4cbb201189" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3013.938327] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/74a23a47-222b-4ae7-9ac7-09b3dd86fd1e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3013.938448] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/74a23a47-222b-4ae7-9ac7-09b3dd86fd1e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3013.938810] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/74a23a47-222b-4ae7-9ac7-09b3dd86fd1e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3013.939075] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0814dc15-e562-477e-bb2f-7fea62957231 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3013.943110] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3013.943110] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521185e6-a2a2-8df0-b934-6d8eacbd47bc" [ 3013.943110] env[61663]: _type = "Task" [ 3013.943110] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3013.949995] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521185e6-a2a2-8df0-b934-6d8eacbd47bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3014.453392] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521185e6-a2a2-8df0-b934-6d8eacbd47bc, 'name': SearchDatastore_Task, 'duration_secs': 0.007453} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3014.453690] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/74a23a47-222b-4ae7-9ac7-09b3dd86fd1e/ts-2024-12-01-04-39-00 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3014.453928] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9753e3f4-f8b0-48fa-bbd8-02623f9cd63e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3014.465894] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/74a23a47-222b-4ae7-9ac7-09b3dd86fd1e/ts-2024-12-01-04-39-00 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3014.466041] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 74a23a47-222b-4ae7-9ac7-09b3dd86fd1e is no longer used by this node. Pending deletion! [ 3014.466204] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/74a23a47-222b-4ae7-9ac7-09b3dd86fd1e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3014.466417] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3014.466601] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3014.466843] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3014.467098] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc9ce554-cfee-439c-8606-aa1cc24a903d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3014.471248] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3014.471248] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5282e2bb-4967-232f-6430-d7957065988c" [ 3014.471248] env[61663]: _type = "Task" [ 3014.471248] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3014.478739] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5282e2bb-4967-232f-6430-d7957065988c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3014.982029] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5282e2bb-4967-232f-6430-d7957065988c, 'name': SearchDatastore_Task, 'duration_secs': 0.007129} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3014.982283] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b/ts-2024-12-01-04-39-00 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3014.982550] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-988a1d30-94c9-4402-b7d3-33b2ba09819e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3014.994954] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b/ts-2024-12-01-04-39-00 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3014.995106] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3059ec87-123e-4fc8-b73c-1220b342229b is no longer used by this node. Pending deletion! [ 3014.995272] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3059ec87-123e-4fc8-b73c-1220b342229b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3014.995487] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/11ca84b7-360f-4797-a4e1-4d3a662cae9f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3014.995644] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/11ca84b7-360f-4797-a4e1-4d3a662cae9f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3014.995984] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/11ca84b7-360f-4797-a4e1-4d3a662cae9f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3014.996244] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa6ccd9e-900c-4edc-bdee-1efb5e0cf49f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3015.000399] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3015.000399] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b2526c-871f-5f9e-471f-d3913ce4beb3" [ 3015.000399] env[61663]: _type = "Task" [ 3015.000399] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3015.007461] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b2526c-871f-5f9e-471f-d3913ce4beb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3015.510489] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b2526c-871f-5f9e-471f-d3913ce4beb3, 'name': SearchDatastore_Task, 'duration_secs': 0.007324} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3015.510776] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/11ca84b7-360f-4797-a4e1-4d3a662cae9f/ts-2024-12-01-04-39-01 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3015.511061] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-644a43f9-9913-46ea-b18c-683f39df5bb1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3015.522485] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/11ca84b7-360f-4797-a4e1-4d3a662cae9f/ts-2024-12-01-04-39-01 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3015.522635] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 11ca84b7-360f-4797-a4e1-4d3a662cae9f is no longer used by this node. Pending deletion! 
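The entries above all follow one polling idiom: oslo.vmware's wait_for_task kicks off a SearchDatastore_Task, logs "progress is 0%", sleeps, and re-polls until the task reports "completed successfully" (note the roughly 0.5 s gap between each poll pair, e.g. 3014.478 -> 3014.982). A minimal, self-contained sketch of that idiom follows; it uses nothing beyond the standard library, and poll_fn, the interval, and the timeout are illustrative stand-ins, not oslo.vmware's actual signature:

import time

def wait_for_task(poll_fn, interval=0.5, timeout=60.0):
    # poll_fn() -> (done, result); one call mirrors one _poll_task
    # round trip in the log above.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        done, result = poll_fn()
        if done:
            return result          # the "completed successfully" case
        time.sleep(interval)       # matches the ~0.5 s spacing between polls
    raise TimeoutError('task did not complete within %.1f s' % timeout)
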
[ 3015.522797] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/11ca84b7-360f-4797-a4e1-4d3a662cae9f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3015.523023] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/ebf7fdaf-a3f9-487d-9733-c0f819d0568d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3015.523148] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/ebf7fdaf-a3f9-487d-9733-c0f819d0568d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3015.523462] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/ebf7fdaf-a3f9-487d-9733-c0f819d0568d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3015.523727] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e10ca3f-4f0d-4132-ae8d-a406eb82af29 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3015.528157] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3015.528157] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ee3e9c-aeb4-2166-7184-cf57714ea6d1" [ 3015.528157] env[61663]: _type = "Task" [ 3015.528157] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3015.535402] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ee3e9c-aeb4-2166-7184-cf57714ea6d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3016.038769] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ee3e9c-aeb4-2166-7184-cf57714ea6d1, 'name': SearchDatastore_Task, 'duration_secs': 0.007713} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3016.039023] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/ebf7fdaf-a3f9-487d-9733-c0f819d0568d/ts-2024-12-01-04-39-01 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3016.039285] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-573b848a-6372-4a20-9d4a-d2356df79a86 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3016.050184] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/ebf7fdaf-a3f9-487d-9733-c0f819d0568d/ts-2024-12-01-04-39-01 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3016.050344] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image ebf7fdaf-a3f9-487d-9733-c0f819d0568d is no longer used by this node. Pending deletion! [ 3016.050487] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/ebf7fdaf-a3f9-487d-9733-c0f819d0568d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3016.050700] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e17d38df-d304-41ac-ae32-37ebfe45e904" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3016.050825] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e17d38df-d304-41ac-ae32-37ebfe45e904" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3016.051164] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e17d38df-d304-41ac-ae32-37ebfe45e904" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3016.051393] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed3065e5-b692-4f39-9463-bbe47db7a12b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3016.055349] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3016.055349] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520aefa8-1aeb-68da-3485-c5b15cd48d73" [ 3016.055349] env[61663]: _type = "Task" [ 3016.055349] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3016.062553] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520aefa8-1aeb-68da-3485-c5b15cd48d73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3016.566411] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520aefa8-1aeb-68da-3485-c5b15cd48d73, 'name': SearchDatastore_Task, 'duration_secs': 0.007139} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3016.566793] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e17d38df-d304-41ac-ae32-37ebfe45e904/ts-2024-12-01-04-39-02 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3016.566938] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bfad6153-05d3-4680-958c-a6bafe1c726b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3016.577722] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e17d38df-d304-41ac-ae32-37ebfe45e904/ts-2024-12-01-04-39-02 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3016.577924] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e17d38df-d304-41ac-ae32-37ebfe45e904 is no longer used by this node. Pending deletion! [ 3016.578067] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e17d38df-d304-41ac-ae32-37ebfe45e904" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3016.578291] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3016.578410] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3016.578724] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3016.578959] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef02b4f-d95f-4232-87e3-9e0119bd3f5f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3016.582856] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3016.582856] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f419dc-565c-bb23-5619-0dde67df9baf" [ 3016.582856] env[61663]: _type = "Task" [ 3016.582856] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3016.589932] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f419dc-565c-bb23-5619-0dde67df9baf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3017.093622] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f419dc-565c-bb23-5619-0dde67df9baf, 'name': SearchDatastore_Task, 'duration_secs': 0.007992} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3017.093886] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf is no longer used. Deleting! [ 3017.094045] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3017.094310] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b02875b-cb04-46dd-8e76-f9232c77cff3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.100585] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3017.100585] env[61663]: value = "task-1690940" [ 3017.100585] env[61663]: _type = "Task" [ 3017.100585] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3017.107895] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690940, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3017.610535] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690940, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099799} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3017.610843] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3017.610907] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3017.611151] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/9ef032b6-32b1-4aed-83ef-e5e7e99b5fa9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3017.611272] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/9ef032b6-32b1-4aed-83ef-e5e7e99b5fa9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3017.611587] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/9ef032b6-32b1-4aed-83ef-e5e7e99b5fa9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3017.611849] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21737c54-743d-4e90-9824-a7ae7e91b785 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.615927] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3017.615927] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52edeede-9e93-6827-91e5-b5a8720f779a" [ 3017.615927] env[61663]: _type = "Task" [ 3017.615927] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3017.623147] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52edeede-9e93-6827-91e5-b5a8720f779a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3018.126418] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52edeede-9e93-6827-91e5-b5a8720f779a, 'name': SearchDatastore_Task, 'duration_secs': 0.007714} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3018.126689] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/9ef032b6-32b1-4aed-83ef-e5e7e99b5fa9/ts-2024-12-01-04-39-03 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3018.127140] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-512fe9f3-1c70-4036-bb2f-8a8e5cf2d20a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3018.137331] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/9ef032b6-32b1-4aed-83ef-e5e7e99b5fa9/ts-2024-12-01-04-39-03 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3018.137474] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 9ef032b6-32b1-4aed-83ef-e5e7e99b5fa9 is no longer used by this node. Pending deletion! [ 3018.137631] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/9ef032b6-32b1-4aed-83ef-e5e7e99b5fa9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3018.137874] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/ec84a6ff-8183-4491-917d-4a8ea9876b12" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3018.138039] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/ec84a6ff-8183-4491-917d-4a8ea9876b12" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3018.138304] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/ec84a6ff-8183-4491-917d-4a8ea9876b12" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3018.138565] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fe5de90-a2f4-41ce-bae5-0bb36076cb1d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3018.142523] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3018.142523] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52af7e55-6bb1-b2ec-addd-04efd8b43266" [ 3018.142523] env[61663]: _type = "Task" [ 3018.142523] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3018.149919] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52af7e55-6bb1-b2ec-addd-04efd8b43266, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3018.653664] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52af7e55-6bb1-b2ec-addd-04efd8b43266, 'name': SearchDatastore_Task, 'duration_secs': 0.007102} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3018.654065] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/ec84a6ff-8183-4491-917d-4a8ea9876b12/ts-2024-12-01-04-39-04 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3018.654274] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e87e3f7-c037-4676-92ad-7d10228dc2c0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3018.665128] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/ec84a6ff-8183-4491-917d-4a8ea9876b12/ts-2024-12-01-04-39-04 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3018.665378] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image ec84a6ff-8183-4491-917d-4a8ea9876b12 is no longer used by this node. Pending deletion! [ 3018.665511] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/ec84a6ff-8183-4491-917d-4a8ea9876b12" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3018.665632] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/984c8d8c-cf46-49bc-95b4-0fe5a136d843" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3018.665751] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/984c8d8c-cf46-49bc-95b4-0fe5a136d843" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3018.666076] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/984c8d8c-cf46-49bc-95b4-0fe5a136d843" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3018.666302] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e1aed5e-7b51-4849-98d8-89ff3668cec2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3018.670199] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3018.670199] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5298e4b9-7656-c748-9642-353893bff550" [ 3018.670199] env[61663]: _type = "Task" [ 3018.670199] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3018.677046] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5298e4b9-7656-c748-9642-353893bff550, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3019.182409] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5298e4b9-7656-c748-9642-353893bff550, 'name': SearchDatastore_Task, 'duration_secs': 0.007117} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3019.182870] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/984c8d8c-cf46-49bc-95b4-0fe5a136d843/ts-2024-12-01-04-39-05 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3019.185017] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad2ab920-de09-4bbe-a82d-389e8681739b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.194582] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/984c8d8c-cf46-49bc-95b4-0fe5a136d843/ts-2024-12-01-04-39-05 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3019.194965] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 984c8d8c-cf46-49bc-95b4-0fe5a136d843 is no longer used by this node. Pending deletion! 
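What this pass is doing is a mark-and-sweep over the image cache: for each cached image no instance on this node uses, nova.virt.vmwareapi.imagecache creates a ts-YYYY-MM-DD-HH-MM-SS marker directory and logs "Pending deletion!"; an entry whose marker has already aged past the configured minimum is removed outright via FileManager.DeleteDatastoreFile_Task ("no longer used. Deleting!", as happened to f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf above). Below is a local-filesystem analogue of that logic as a hedged sketch: only the ts- marker convention and the mkdir/delete pairing come from the log itself; the helper name, directory layout, and 24-hour limit are assumptions standing in for Nova's configurable minimum age:

import shutil
from datetime import datetime, timedelta
from pathlib import Path

TS_FORMAT = 'ts-%Y-%m-%d-%H-%M-%S'
AGE_LIMIT = timedelta(hours=24)   # stand-in for the real, configurable age

def age_image_cache(cache_dir: Path, used_image_ids: set) -> None:
    now = datetime.utcnow()
    for entry in cache_dir.iterdir():
        if not entry.is_dir():
            continue
        markers = sorted(entry.glob('ts-*'))
        if entry.name in used_image_ids:
            for m in markers:
                shutil.rmtree(m)   # image is in use again: unmark it
        elif not markers:
            # First pass over an unused image: create the timestamp marker
            # (the FileManager.MakeDirectory calls in the log above).
            (entry / now.strftime(TS_FORMAT)).mkdir()
        elif now - datetime.strptime(markers[0].name, TS_FORMAT) > AGE_LIMIT:
            # Marker aged out: drop the whole cache entry (the
            # DeleteDatastoreFile_Task calls in the log above).
            shutil.rmtree(entry)

The two-phase design is why most images here only get a marker: deletion is deferred a full aging period, so an image that comes back into use before the marker expires is simply unmarked rather than re-downloaded.
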
[ 3019.195283] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/984c8d8c-cf46-49bc-95b4-0fe5a136d843" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3019.195676] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2924d62d-8c6b-494e-adc7-6a02fccfd81c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3019.197025] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2924d62d-8c6b-494e-adc7-6a02fccfd81c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3019.197025] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2924d62d-8c6b-494e-adc7-6a02fccfd81c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3019.197025] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e61e57d0-25b6-4cd8-9643-914292ba9813 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.203825] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3019.203825] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ed6ca4-465b-a631-41ea-582898bf23ee" [ 3019.203825] env[61663]: _type = "Task" [ 3019.203825] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3019.209463] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ed6ca4-465b-a631-41ea-582898bf23ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3019.712329] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ed6ca4-465b-a631-41ea-582898bf23ee, 'name': SearchDatastore_Task, 'duration_secs': 0.007383} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3019.712862] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/2924d62d-8c6b-494e-adc7-6a02fccfd81c/ts-2024-12-01-04-39-05 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3019.713317] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-347e8d46-c090-4a66-99b2-dc04094ba989 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.725065] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/2924d62d-8c6b-494e-adc7-6a02fccfd81c/ts-2024-12-01-04-39-05 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3019.725278] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 2924d62d-8c6b-494e-adc7-6a02fccfd81c is no longer used by this node. Pending deletion! [ 3019.725493] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2924d62d-8c6b-494e-adc7-6a02fccfd81c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3019.725769] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/323b6fba-f7ae-4090-a2fe-ed5413cdcdf0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3019.725994] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/323b6fba-f7ae-4090-a2fe-ed5413cdcdf0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3019.726345] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/323b6fba-f7ae-4090-a2fe-ed5413cdcdf0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3019.726642] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3185731e-8698-4fd5-bb00-97a287107b16 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.730705] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3019.730705] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52195a07-2774-d9ae-35da-788e11fc8bf9" [ 3019.730705] env[61663]: _type = "Task" [ 3019.730705] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3019.737777] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52195a07-2774-d9ae-35da-788e11fc8bf9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3020.241539] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52195a07-2774-d9ae-35da-788e11fc8bf9, 'name': SearchDatastore_Task, 'duration_secs': 0.006889} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3020.241823] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/323b6fba-f7ae-4090-a2fe-ed5413cdcdf0/ts-2024-12-01-04-39-06 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3020.242101] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f31cc679-75f9-48d5-bf68-9bcb31d46b47 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3020.253970] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/323b6fba-f7ae-4090-a2fe-ed5413cdcdf0/ts-2024-12-01-04-39-06 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3020.254207] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 323b6fba-f7ae-4090-a2fe-ed5413cdcdf0 is no longer used by this node. Pending deletion! [ 3020.254391] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/323b6fba-f7ae-4090-a2fe-ed5413cdcdf0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3020.254613] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6d9791f1-bb1c-4ac9-8d1f-bf87b31b7832" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3020.254977] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6d9791f1-bb1c-4ac9-8d1f-bf87b31b7832" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3020.255102] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6d9791f1-bb1c-4ac9-8d1f-bf87b31b7832" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3020.255403] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a44f1ec7-c416-43a6-99db-261f48e687e7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3020.263443] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3020.263443] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5200e9c7-72ae-9e6c-1a26-f9d4fe12f9d3" [ 3020.263443] env[61663]: _type = "Task" [ 3020.263443] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3020.270846] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5200e9c7-72ae-9e6c-1a26-f9d4fe12f9d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3020.774360] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5200e9c7-72ae-9e6c-1a26-f9d4fe12f9d3, 'name': SearchDatastore_Task, 'duration_secs': 0.010457} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3020.774688] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/6d9791f1-bb1c-4ac9-8d1f-bf87b31b7832/ts-2024-12-01-04-39-06 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3020.775250] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3248e8d0-bd55-4e99-9961-c94c42425db7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3020.784983] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/6d9791f1-bb1c-4ac9-8d1f-bf87b31b7832/ts-2024-12-01-04-39-06 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3020.785159] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 6d9791f1-bb1c-4ac9-8d1f-bf87b31b7832 is no longer used by this node. Pending deletion! 
[ 3020.785293] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6d9791f1-bb1c-4ac9-8d1f-bf87b31b7832" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3020.785503] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4e1bcbe9-ac7d-40e2-ade3-56411594b5a9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3020.785615] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4e1bcbe9-ac7d-40e2-ade3-56411594b5a9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3020.785915] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4e1bcbe9-ac7d-40e2-ade3-56411594b5a9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3020.786149] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b871977f-069f-4b2b-97b4-a6337853a1b2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3020.789876] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3020.789876] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c6d7fe-7dcb-ac11-d056-6f98beefd291" [ 3020.789876] env[61663]: _type = "Task" [ 3020.789876] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3020.797121] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c6d7fe-7dcb-ac11-d056-6f98beefd291, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3021.300482] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c6d7fe-7dcb-ac11-d056-6f98beefd291, 'name': SearchDatastore_Task, 'duration_secs': 0.007337} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3021.300735] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4e1bcbe9-ac7d-40e2-ade3-56411594b5a9/ts-2024-12-01-04-39-07 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3021.301091] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8d0c96d-ff77-44d1-a496-521d5f3f20ad {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.312642] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4e1bcbe9-ac7d-40e2-ade3-56411594b5a9/ts-2024-12-01-04-39-07 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3021.312811] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4e1bcbe9-ac7d-40e2-ade3-56411594b5a9 is no longer used by this node. Pending deletion! [ 3021.312946] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4e1bcbe9-ac7d-40e2-ade3-56411594b5a9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3021.313210] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/524a6be2-98e8-434c-8e58-0dd909d663fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3021.313337] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/524a6be2-98e8-434c-8e58-0dd909d663fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3021.313654] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/524a6be2-98e8-434c-8e58-0dd909d663fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3021.313931] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e471a3c3-fd4e-4354-953b-cab79f2e9c7b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.318572] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3021.318572] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5247ad38-a0d5-0dab-138f-e493baec6d71" [ 3021.318572] env[61663]: _type = "Task" [ 3021.318572] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3021.326019] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5247ad38-a0d5-0dab-138f-e493baec6d71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3021.830452] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5247ad38-a0d5-0dab-138f-e493baec6d71, 'name': SearchDatastore_Task, 'duration_secs': 0.00867} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3021.830876] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/524a6be2-98e8-434c-8e58-0dd909d663fb is no longer used. Deleting! [ 3021.830985] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/524a6be2-98e8-434c-8e58-0dd909d663fb {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3021.831293] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be8fe4b4-2d66-4f5a-b0a8-8edb73d43ac0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.837803] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3021.837803] env[61663]: value = "task-1690941" [ 3021.837803] env[61663]: _type = "Task" [ 3021.837803] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3021.845506] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3022.348373] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103464} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3022.348620] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3022.348888] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/524a6be2-98e8-434c-8e58-0dd909d663fb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3022.349052] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4fef67d3-e8e3-418d-af89-0e3e8c040c58" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3022.349188] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4fef67d3-e8e3-418d-af89-0e3e8c040c58" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3022.349507] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4fef67d3-e8e3-418d-af89-0e3e8c040c58" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3022.349774] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b97307b6-a89a-497c-86d7-c9b550e50f73 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3022.355138] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3022.355138] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5247ce3f-220e-6066-dbb5-3595493d4455" [ 3022.355138] env[61663]: _type = "Task" [ 3022.355138] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3022.362426] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5247ce3f-220e-6066-dbb5-3595493d4455, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3022.865130] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5247ce3f-220e-6066-dbb5-3595493d4455, 'name': SearchDatastore_Task, 'duration_secs': 0.008363} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3022.865518] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4fef67d3-e8e3-418d-af89-0e3e8c040c58/ts-2024-12-01-04-39-08 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3022.865835] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fabb848-d1d4-47a9-bfe0-48d0781daa45 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3022.878058] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4fef67d3-e8e3-418d-af89-0e3e8c040c58/ts-2024-12-01-04-39-08 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3022.878374] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4fef67d3-e8e3-418d-af89-0e3e8c040c58 is no longer used by this node. Pending deletion! [ 3022.878653] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4fef67d3-e8e3-418d-af89-0e3e8c040c58" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3022.879018] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d29e34a7-3cd6-44fc-823b-e8dacfa91294" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3022.879256] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d29e34a7-3cd6-44fc-823b-e8dacfa91294" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3022.879658] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d29e34a7-3cd6-44fc-823b-e8dacfa91294" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3022.879996] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fded3051-b463-42d7-81c8-040971e58889 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3022.884309] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3022.884309] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f01d40-e4f1-7945-7346-eb10b262d3cf" [ 3022.884309] env[61663]: _type = "Task" [ 3022.884309] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3022.892152] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f01d40-e4f1-7945-7346-eb10b262d3cf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3023.396061] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f01d40-e4f1-7945-7346-eb10b262d3cf, 'name': SearchDatastore_Task, 'duration_secs': 0.007585} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3023.396339] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d29e34a7-3cd6-44fc-823b-e8dacfa91294/ts-2024-12-01-04-39-09 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3023.396622] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e52c293-d179-4e1c-aefa-1f32efba5617 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3023.410010] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d29e34a7-3cd6-44fc-823b-e8dacfa91294/ts-2024-12-01-04-39-09 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3023.410316] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d29e34a7-3cd6-44fc-823b-e8dacfa91294 is no longer used by this node. Pending deletion! [ 3023.410554] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d29e34a7-3cd6-44fc-823b-e8dacfa91294" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3023.410842] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4eb71522-82a2-491d-9889-1b8191502ad7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3023.411094] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4eb71522-82a2-491d-9889-1b8191502ad7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3023.411477] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4eb71522-82a2-491d-9889-1b8191502ad7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3023.411828] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71db9760-1271-4119-88b6-3250db5a3c44 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3023.416979] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3023.416979] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5233e6b4-73e2-3ae1-dc1a-3bab3f654cd6" [ 3023.416979] env[61663]: _type = "Task" [ 3023.416979] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3023.425703] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5233e6b4-73e2-3ae1-dc1a-3bab3f654cd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3023.927244] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5233e6b4-73e2-3ae1-dc1a-3bab3f654cd6, 'name': SearchDatastore_Task, 'duration_secs': 0.008519} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3023.927594] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4eb71522-82a2-491d-9889-1b8191502ad7/ts-2024-12-01-04-39-09 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3023.927747] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d304c58d-9202-4763-a64f-894121558446 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3023.939297] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4eb71522-82a2-491d-9889-1b8191502ad7/ts-2024-12-01-04-39-09 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3023.939503] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4eb71522-82a2-491d-9889-1b8191502ad7 is no longer used by this node. Pending deletion! 
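Each cache entry in this pass is also bracketed by the same Acquiring/Acquired/Releasing triple from oslo_concurrency.lockutils, plus an "external semaphore" so that concurrent nova-compute processes cannot age the same entry at once. A sketch of that serialization, assuming oslo.concurrency's lockutils.lock context manager (external=True selects the cross-process, file-backed variant); the lock name simply mirrors the "[datastore2] devstack-image-cache_base/<image-id>" strings above and the helper is illustrative:

from oslo_concurrency import lockutils

def with_image_lock(datastore, image_id, work):
    # Serialize one cache entry the way the Acquire/Release pairs above do.
    name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
    with lockutils.lock(name, external=True):
        work()   # e.g. search the datastore, mkdir the ts- marker, delete
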
[ 3023.939600] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4eb71522-82a2-491d-9889-1b8191502ad7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3023.939812] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3023.939929] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3023.940304] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3023.940588] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f678d9-f755-46dd-9784-15dd4db54f2b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3023.945379] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3023.945379] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52518175-f19a-e7db-c6fd-f0736635ebc0" [ 3023.945379] env[61663]: _type = "Task" [ 3023.945379] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3023.952837] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52518175-f19a-e7db-c6fd-f0736635ebc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3024.457382] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52518175-f19a-e7db-c6fd-f0736635ebc0, 'name': SearchDatastore_Task, 'duration_secs': 0.008217} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3024.457702] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e/ts-2024-12-01-04-39-10 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3024.458057] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc0e51ef-2d02-4356-b58d-556e7295aa2d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3024.469952] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e/ts-2024-12-01-04-39-10 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3024.470077] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 1e0860e2-4aa5-48a7-bc29-49ea8e6f038e is no longer used by this node. Pending deletion! [ 3024.470256] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3024.470471] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e015a2f3-e16b-4dc6-8f4c-407c2a6de19e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3024.470590] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e015a2f3-e16b-4dc6-8f4c-407c2a6de19e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3024.470907] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e015a2f3-e16b-4dc6-8f4c-407c2a6de19e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3024.471187] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a308430-812b-46f7-bfeb-0ae5d4113b32 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3024.475663] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3024.475663] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52429966-fe66-8032-6d4a-fa1d859b12ca" [ 3024.475663] env[61663]: _type = "Task" [ 3024.475663] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3024.483752] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52429966-fe66-8032-6d4a-fa1d859b12ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3024.986356] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52429966-fe66-8032-6d4a-fa1d859b12ca, 'name': SearchDatastore_Task, 'duration_secs': 0.007824} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3024.986723] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e015a2f3-e16b-4dc6-8f4c-407c2a6de19e/ts-2024-12-01-04-39-10 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3024.986881] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-52b49f1b-b0fb-4e1f-843f-ad8b41241e58 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3024.997819] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e015a2f3-e16b-4dc6-8f4c-407c2a6de19e/ts-2024-12-01-04-39-10 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3024.997968] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e015a2f3-e16b-4dc6-8f4c-407c2a6de19e is no longer used by this node. Pending deletion! [ 3024.998150] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e015a2f3-e16b-4dc6-8f4c-407c2a6de19e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3024.998367] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3024.998484] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3024.998794] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3024.999042] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fad703d-275a-49db-a33a-9abe2d25ca68 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3025.003515] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3025.003515] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a1459e-451d-b44f-1ddc-7c5831a8ee8c" [ 3025.003515] env[61663]: _type = "Task" [ 3025.003515] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3025.010581] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a1459e-451d-b44f-1ddc-7c5831a8ee8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3025.513592] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a1459e-451d-b44f-1ddc-7c5831a8ee8c, 'name': SearchDatastore_Task, 'duration_secs': 0.00742} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3025.513872] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327/ts-2024-12-01-04-39-11 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3025.514148] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38af2412-e154-4be7-be33-fe49c09a8bc6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3025.525817] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327/ts-2024-12-01-04-39-11 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3025.525989] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image f69db335-1cd0-4f6e-a5d1-8d90cfa58327 is no longer used by this node. Pending deletion! 
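Each cache entry is handled inside the same lock bracket: "Acquiring lock" / "Acquired lock" / "Acquired external semaphore" on the way in, "Releasing lock" once the marker directory is written. A sketch of that critical section with oslo.concurrency; the lock name mirrors the log and the body is a placeholder:

from oslo_concurrency import lockutils


def with_cache_lock(image_id, work):
    # external=True layers a file-based lock (the "external semaphore"
    # at lockutils.py:321) over the in-process lock, so two processes
    # cannot age the same cache entry at once.
    name = '[datastore2] devstack-image-cache_base/%s' % image_id
    with lockutils.lock(name, external=True):
        return work()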
[ 3025.526174] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3025.526403] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/39330051-57b1-4e72-be41-abb0b16cc33a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3025.526532] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/39330051-57b1-4e72-be41-abb0b16cc33a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3025.526826] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/39330051-57b1-4e72-be41-abb0b16cc33a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3025.527130] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f6e6adc-9184-4e9c-9f1a-2eb2dac124de {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3025.531202] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3025.531202] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5258c6cc-88ff-7e6a-4293-4de9a32bb1eb" [ 3025.531202] env[61663]: _type = "Task" [ 3025.531202] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3025.539362] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5258c6cc-88ff-7e6a-4293-4de9a32bb1eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3025.816055] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d4ce421c-9bb5-4fb2-ba43-8ae777e18361 tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "69194463-5c6d-4119-9c19-91b24149bd8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3026.043291] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5258c6cc-88ff-7e6a-4293-4de9a32bb1eb, 'name': SearchDatastore_Task, 'duration_secs': 0.00835} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3026.043874] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/39330051-57b1-4e72-be41-abb0b16cc33a is no longer used. Deleting! 
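Interleaved with the cache pass, a tempest request (req-d4ce421c-...) begins tearing down instance 69194463-5c6d-4119-9c19-91b24149bd8b and first acquires a lock named after the instance UUID on behalf of do_terminate_instance. That is the decorator form of the same oslo.concurrency primitive; a sketch of the shape only, with a placeholder body:

from oslo_concurrency import lockutils


def terminate_instance_sketch(instance_uuid):
    # A per-instance lock serializes lifecycle operations, which is why
    # the log shows the lock being taken before any teardown happens.
    @lockutils.synchronized(instance_uuid)
    def do_terminate_instance():
        pass  # placeholder for power-off and cleanup

    do_terminate_instance()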
[ 3026.044091] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/39330051-57b1-4e72-be41-abb0b16cc33a {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3026.044477] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-473d5248-eea3-49ef-be89-a415d2e269c2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3026.051437] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3026.051437] env[61663]: value = "task-1690942" [ 3026.051437] env[61663]: _type = "Task" [ 3026.051437] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3026.059556] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3026.561630] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107588} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3026.564878] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3026.564878] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/39330051-57b1-4e72-be41-abb0b16cc33a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3026.564878] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/00d273a5-a4e1-4994-a5c1-8e4cf0688a26" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3026.564878] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/00d273a5-a4e1-4994-a5c1-8e4cf0688a26" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3026.564878] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/00d273a5-a4e1-4994-a5c1-8e4cf0688a26" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3026.564878] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b3c1601-0f0f-4a7d-94e2-6055455d6e04 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3026.570015] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3026.570015] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529e7751-d7f0-94f7-4619-8deeb52b166d" [ 3026.570015] env[61663]: _type = "Task" [ 3026.570015] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3026.574806] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529e7751-d7f0-94f7-4619-8deeb52b166d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3027.080033] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529e7751-d7f0-94f7-4619-8deeb52b166d, 'name': SearchDatastore_Task, 'duration_secs': 0.007931} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3027.080033] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/00d273a5-a4e1-4994-a5c1-8e4cf0688a26/ts-2024-12-01-04-39-12 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3027.080033] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05ba6b98-badd-43de-ac03-24311a970184 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3027.090685] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/00d273a5-a4e1-4994-a5c1-8e4cf0688a26/ts-2024-12-01-04-39-12 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3027.091039] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 00d273a5-a4e1-4994-a5c1-8e4cf0688a26 is no longer used by this node. Pending deletion! 
[ 3027.091352] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/00d273a5-a4e1-4994-a5c1-8e4cf0688a26" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3027.091700] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/c8afd113-3027-41bd-9997-52494019a75a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3027.091950] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/c8afd113-3027-41bd-9997-52494019a75a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3027.092400] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c8afd113-3027-41bd-9997-52494019a75a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3027.092764] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b8ec7ac-86c9-42c6-8247-7a22285492a3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3027.097064] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3027.097064] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bffd34-b8eb-873a-252e-187c44be87cd" [ 3027.097064] env[61663]: _type = "Task" [ 3027.097064] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3027.104919] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bffd34-b8eb-873a-252e-187c44be87cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3027.608589] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bffd34-b8eb-873a-252e-187c44be87cd, 'name': SearchDatastore_Task, 'duration_secs': 0.00851} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3027.608827] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/c8afd113-3027-41bd-9997-52494019a75a/ts-2024-12-01-04-39-13 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3027.609181] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7de5f31d-bb47-4d85-9e91-c9b756efbe58 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3027.622867] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/c8afd113-3027-41bd-9997-52494019a75a/ts-2024-12-01-04-39-13 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3027.623014] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image c8afd113-3027-41bd-9997-52494019a75a is no longer used by this node. Pending deletion! [ 3027.623244] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/c8afd113-3027-41bd-9997-52494019a75a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3027.623439] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/71c322df-e849-41dd-a670-5fc17c04f595" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3027.623562] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/71c322df-e849-41dd-a670-5fc17c04f595" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3027.623882] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/71c322df-e849-41dd-a670-5fc17c04f595" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3027.624121] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a450263e-2f39-4f05-a3a5-096cba9b937c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3027.628392] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3027.628392] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c32cac-7e34-96d7-ed9f-ab3b09817570" [ 3027.628392] env[61663]: _type = "Task" [ 3027.628392] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3027.635786] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c32cac-7e34-96d7-ed9f-ab3b09817570, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3028.138713] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c32cac-7e34-96d7-ed9f-ab3b09817570, 'name': SearchDatastore_Task, 'duration_secs': 0.009288} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3028.139095] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/71c322df-e849-41dd-a670-5fc17c04f595/ts-2024-12-01-04-39-13 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3028.139260] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42a4e589-5a30-4cfa-b64e-ca2e68637e4b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3028.150549] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/71c322df-e849-41dd-a670-5fc17c04f595/ts-2024-12-01-04-39-13 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3028.150703] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 71c322df-e849-41dd-a670-5fc17c04f595 is no longer used by this node. Pending deletion! [ 3028.150869] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/71c322df-e849-41dd-a670-5fc17c04f595" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3028.151093] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3d8f0569-2724-4a72-a695-ce407e7156ac" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3028.151216] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3d8f0569-2724-4a72-a695-ce407e7156ac" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3028.151524] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3d8f0569-2724-4a72-a695-ce407e7156ac" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3028.151744] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21005fd7-0d2f-46e1-86ac-1cabc6d7f57a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3028.155736] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3028.155736] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525587ed-d6b3-1b8f-a675-eadadb3446a0" [ 3028.155736] env[61663]: _type = "Task" [ 3028.155736] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3028.162917] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525587ed-d6b3-1b8f-a675-eadadb3446a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3028.668118] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525587ed-d6b3-1b8f-a675-eadadb3446a0, 'name': SearchDatastore_Task, 'duration_secs': 0.007931} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3028.668118] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3d8f0569-2724-4a72-a695-ce407e7156ac/ts-2024-12-01-04-39-14 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3028.668422] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-492dccdd-6ed8-4c46-a079-e4a1a165c856 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3028.683489] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3d8f0569-2724-4a72-a695-ce407e7156ac/ts-2024-12-01-04-39-14 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3028.683642] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3d8f0569-2724-4a72-a695-ce407e7156ac is no longer used by this node. Pending deletion! 
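For every image still cached but unused, the pass drops an empty ts-YYYY-MM-DD-HH-MM-SS folder beside the image ("Pending deletion!"); a later pass can compare that timestamp against the configured minimum age before removing anything. MakeDirectory is a synchronous SOAP method, which is why no task poll follows it in the log. A sketch of the naming and the call; the helper name is invented:

from datetime import datetime, timezone


def mark_unused(session, dc_ref, image_id):
    # Marker name matches the ts-2024-12-01-04-39-NN folders above.
    ts = datetime.now(timezone.utc).strftime('ts-%Y-%m-%d-%H-%M-%S')
    path = ('[datastore2] devstack-image-cache_base/%s/%s'
            % (image_id, ts))
    session.invoke_api(session.vim, 'MakeDirectory',
                       session.vim.service_content.fileManager,
                       name=path, datacenter=dc_ref,
                       createParentDirectories=True)
    return path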
[ 3028.683805] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3d8f0569-2724-4a72-a695-ce407e7156ac" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3028.684029] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/36e6041c-f0b7-47d3-a69d-bb0272f32cc1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3028.684179] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/36e6041c-f0b7-47d3-a69d-bb0272f32cc1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3028.684539] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/36e6041c-f0b7-47d3-a69d-bb0272f32cc1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3028.684794] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f72dd984-cd77-47f3-b477-ff3dbb1002ee {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3028.689179] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3028.689179] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52297ce9-5b7c-6883-865f-dbe3e486af5e" [ 3028.689179] env[61663]: _type = "Task" [ 3028.689179] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3028.696842] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52297ce9-5b7c-6883-865f-dbe3e486af5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3029.199860] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52297ce9-5b7c-6883-865f-dbe3e486af5e, 'name': SearchDatastore_Task, 'duration_secs': 0.009864} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3029.200292] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/36e6041c-f0b7-47d3-a69d-bb0272f32cc1/ts-2024-12-01-04-39-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3029.200483] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-674e4742-290a-4a3f-945f-150fff39efd1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3029.214847] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/36e6041c-f0b7-47d3-a69d-bb0272f32cc1/ts-2024-12-01-04-39-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3029.215036] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 36e6041c-f0b7-47d3-a69d-bb0272f32cc1 is no longer used by this node. Pending deletion! [ 3029.215169] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/36e6041c-f0b7-47d3-a69d-bb0272f32cc1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3029.215391] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/38acfbf9-930c-40d4-8678-600f25208ac3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3029.215511] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/38acfbf9-930c-40d4-8678-600f25208ac3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3029.215820] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/38acfbf9-930c-40d4-8678-600f25208ac3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3029.216112] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e35f7537-12a4-4540-9643-76b7c60b7677 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3029.220208] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3029.220208] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ba9792-eee7-4c89-c095-3ffe160a5d25" [ 3029.220208] env[61663]: _type = "Task" [ 3029.220208] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3029.227836] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ba9792-eee7-4c89-c095-3ffe160a5d25, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3029.733484] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ba9792-eee7-4c89-c095-3ffe160a5d25, 'name': SearchDatastore_Task, 'duration_secs': 0.009223} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3029.733484] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/38acfbf9-930c-40d4-8678-600f25208ac3/ts-2024-12-01-04-39-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3029.733484] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-013ea51e-0da3-4d8d-a046-7a4cae176e99 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3029.744778] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/38acfbf9-930c-40d4-8678-600f25208ac3/ts-2024-12-01-04-39-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3029.744778] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 38acfbf9-930c-40d4-8678-600f25208ac3 is no longer used by this node. Pending deletion! [ 3029.744778] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/38acfbf9-930c-40d4-8678-600f25208ac3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3029.744778] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6e87a12d-1fdd-456e-b056-8948da229620" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3029.744778] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6e87a12d-1fdd-456e-b056-8948da229620" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3029.745330] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6e87a12d-1fdd-456e-b056-8948da229620" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3029.746425] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58508ade-9e55-48de-97a0-c74b69c457ce {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3029.751115] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3029.751115] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f6a150-1681-4b96-f8eb-80afe5e0f59f" [ 3029.751115] env[61663]: _type = "Task" [ 3029.751115] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3029.759767] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f6a150-1681-4b96-f8eb-80afe5e0f59f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3030.262107] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f6a150-1681-4b96-f8eb-80afe5e0f59f, 'name': SearchDatastore_Task, 'duration_secs': 0.007418} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3030.262501] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/6e87a12d-1fdd-456e-b056-8948da229620/ts-2024-12-01-04-39-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3030.262572] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5fe0150-c66c-42ec-a806-336fcdaeb79c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3030.274050] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/6e87a12d-1fdd-456e-b056-8948da229620/ts-2024-12-01-04-39-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3030.274223] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 6e87a12d-1fdd-456e-b056-8948da229620 is no longer used by this node. Pending deletion! 
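Each cycle opens with HostDatastoreBrowser.SearchDatastore_Task against the image's cache folder: that is how the pass inspects what is on the datastore (for instance, whether a ts- marker already exists) before deciding to mark or delete. A sketch that fetches the datastore's browser and searches one folder; `ds_ref` is a Datastore moref assumed to be available, and the optional searchSpec is omitted for brevity (nova's own helpers build one):

from oslo_vmware import vim_util


def search_cache_folder(session, ds_ref, image_id):
    browser = session.invoke_api(vim_util, 'get_object_property',
                                 session.vim, ds_ref, 'browser')
    path = '[datastore2] devstack-image-cache_base/%s' % image_id
    task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                              browser, datastorePath=path)
    # TaskInfo.result holds the HostDatastoreBrowserSearchResults.
    return session.wait_for_task(task)  # ~0.007-0.009 s per call above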
[ 3030.274376] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6e87a12d-1fdd-456e-b056-8948da229620" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3030.274629] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/67a73694-b49a-4b05-972a-b9b964215dcd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3030.274753] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/67a73694-b49a-4b05-972a-b9b964215dcd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3030.275100] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/67a73694-b49a-4b05-972a-b9b964215dcd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3030.275386] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd42fc38-0354-4a9c-b30c-3b1cc7fd2c77 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3030.279713] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3030.279713] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c0ff5-4ea0-3c51-d1a5-7034bdeaa70d" [ 3030.279713] env[61663]: _type = "Task" [ 3030.279713] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3030.287683] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c0ff5-4ea0-3c51-d1a5-7034bdeaa70d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3030.790454] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c0ff5-4ea0-3c51-d1a5-7034bdeaa70d, 'name': SearchDatastore_Task, 'duration_secs': 0.00723} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3030.790735] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/67a73694-b49a-4b05-972a-b9b964215dcd/ts-2024-12-01-04-39-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3030.790989] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8979c24e-4e95-4882-9579-234097b57781 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3030.802663] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/67a73694-b49a-4b05-972a-b9b964215dcd/ts-2024-12-01-04-39-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3030.802822] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 67a73694-b49a-4b05-972a-b9b964215dcd is no longer used by this node. Pending deletion! [ 3030.802989] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/67a73694-b49a-4b05-972a-b9b964215dcd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3030.803232] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/23a4c44e-7377-464f-819d-efe7b68df971" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3030.803347] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/23a4c44e-7377-464f-819d-efe7b68df971" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3030.803671] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/23a4c44e-7377-464f-819d-efe7b68df971" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3030.803927] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-281c1f06-fa37-413a-b6ac-223db1a6db80 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3030.809048] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3030.809048] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d721a1-7c19-7c41-6443-2a4df70a36e2" [ 3030.809048] env[61663]: _type = "Task" [ 3030.809048] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3030.816650] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d721a1-7c19-7c41-6443-2a4df70a36e2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3031.324524] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d721a1-7c19-7c41-6443-2a4df70a36e2, 'name': SearchDatastore_Task, 'duration_secs': 0.008102} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3031.324854] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/23a4c44e-7377-464f-819d-efe7b68df971/ts-2024-12-01-04-39-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3031.325116] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e7653e0-c047-48d4-bfb9-149a02bdc4df {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.337688] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/23a4c44e-7377-464f-819d-efe7b68df971/ts-2024-12-01-04-39-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3031.337850] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 23a4c44e-7377-464f-819d-efe7b68df971 is no longer used by this node. Pending deletion! [ 3031.338016] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/23a4c44e-7377-464f-819d-efe7b68df971" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3031.338308] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3031.338433] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3031.338753] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3031.339054] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0b4d1db-3c81-4ce7-b89d-54fe3b02fc78 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.348852] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3031.348852] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5273f927-1216-c010-9514-55d193b7e6a3" [ 3031.348852] env[61663]: _type = "Task" [ 3031.348852] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3031.366702] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5273f927-1216-c010-9514-55d193b7e6a3, 'name': SearchDatastore_Task, 'duration_secs': 0.008172} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3031.366985] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694/ts-2024-12-01-04-39-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3031.367327] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9694b77-6e20-4af6-b71a-0de70a24f58e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.387413] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694/ts-2024-12-01-04-39-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3031.387557] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 33deed77-2408-4c4d-8820-0bdbee5ff694 is no longer used by this node. Pending deletion! [ 3031.387731] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3031.387953] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/9c8d9476-aac2-4ace-b8ed-9a24a88e4634" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3031.388153] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/9c8d9476-aac2-4ace-b8ed-9a24a88e4634" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3031.388488] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/9c8d9476-aac2-4ace-b8ed-9a24a88e4634" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3031.388771] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0846a3d3-b051-4346-904e-6d8ef07fccf5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.397751] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3031.397751] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524a480a-146f-a1bf-352e-ef34ba8008e3" [ 3031.397751] env[61663]: _type = "Task" [ 3031.397751] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3031.406093] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524a480a-146f-a1bf-352e-ef34ba8008e3, 'name': SearchDatastore_Task, 'duration_secs': 0.00794} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3031.406343] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/9c8d9476-aac2-4ace-b8ed-9a24a88e4634/ts-2024-12-01-04-39-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3031.406591] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14325df8-fca3-4993-9fde-53b4d12607a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.417347] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/9c8d9476-aac2-4ace-b8ed-9a24a88e4634/ts-2024-12-01-04-39-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3031.417494] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 9c8d9476-aac2-4ace-b8ed-9a24a88e4634 is no longer used by this node. Pending deletion! [ 3031.417649] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/9c8d9476-aac2-4ace-b8ed-9a24a88e4634" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3031.417860] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2f71067c-077d-48a3-9bcf-df9b85c0cb85" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3031.417978] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2f71067c-077d-48a3-9bcf-df9b85c0cb85" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3031.418340] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2f71067c-077d-48a3-9bcf-df9b85c0cb85" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3031.418576] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a757f31b-f4f1-48fe-90cf-b4a2061e4c23 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.422509] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3031.422509] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526decaa-8c78-28ba-dc3f-a2bcf92932d3" [ 3031.422509] env[61663]: _type = "Task" [ 3031.422509] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3031.430603] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526decaa-8c78-28ba-dc3f-a2bcf92932d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3031.932809] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526decaa-8c78-28ba-dc3f-a2bcf92932d3, 'name': SearchDatastore_Task, 'duration_secs': 0.006839} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3031.933102] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/2f71067c-077d-48a3-9bcf-df9b85c0cb85/ts-2024-12-01-04-39-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3031.933363] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9092e705-a69f-4f53-88e3-8cdaa3e44f40 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.944383] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/2f71067c-077d-48a3-9bcf-df9b85c0cb85/ts-2024-12-01-04-39-17 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3031.945046] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 2f71067c-077d-48a3-9bcf-df9b85c0cb85 is no longer used by this node. Pending deletion! 
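The records above repeat one aging pass per cached image: take the image-cache lock, run a SearchDatastore_Task on the image's cache folder, drop a ts- timestamp marker folder, and log "Pending deletion!". Below is a minimal sketch of that decision logic, assuming a simplified stand-in for the real manager in nova/virt/vmwareapi/imagecache.py; the helper callables, the 24-hour default, and the function name are hypothetical.

```python
# Minimal sketch of the mark-or-delete aging pass traced in this log.
from datetime import datetime, timedelta

TS_PREFIX = "ts-"
TS_FORMAT = "%Y-%m-%d-%H-%M-%S"  # matches ts-2024-12-01-04-39-17 above

def age_cached_image(folder_contents, mkdir, delete_folder,
                     max_age=timedelta(hours=24), now=None):
    """Mark an unused cached image for deletion, or delete it once aged.

    folder_contents: names inside the image's cache folder (e.g. from a
    SearchDatastore_Task result); mkdir/delete_folder: datastore helpers.
    """
    now = now or datetime.utcnow()
    markers = [n for n in folder_contents if n.startswith(TS_PREFIX)]
    if not markers:
        # First pass over an unused image: create the timestamp marker,
        # logged above as "... no longer used by this node. Pending deletion!"
        mkdir(TS_PREFIX + now.strftime(TS_FORMAT))
        return "pending"
    marked_at = datetime.strptime(markers[0][len(TS_PREFIX):], TS_FORMAT)
    if now - marked_at >= max_age:
        # Aged past the threshold, logged as "... no longer used. Deleting!"
        delete_folder()
        return "deleted"
    return "waiting"
```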
[ 3031.945046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2f71067c-077d-48a3-9bcf-df9b85c0cb85" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3031.945046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3e83dcc3-d9a0-4b33-9317-e2b2225d659d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3031.945046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3e83dcc3-d9a0-4b33-9317-e2b2225d659d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3031.945440] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3e83dcc3-d9a0-4b33-9317-e2b2225d659d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3031.945612] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc4e6656-b67c-41f4-bb77-8b7b2a7e169b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.949861] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3031.949861] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523ff30e-1a2a-d903-8b75-b60f46f47886" [ 3031.949861] env[61663]: _type = "Task" [ 3031.949861] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3031.957511] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523ff30e-1a2a-d903-8b75-b60f46f47886, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3032.460451] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523ff30e-1a2a-d903-8b75-b60f46f47886, 'name': SearchDatastore_Task, 'duration_secs': 0.00734} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3032.460825] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3e83dcc3-d9a0-4b33-9317-e2b2225d659d/ts-2024-12-01-04-39-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3032.460993] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb80359b-e01c-4d77-89b0-33748bd670e6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3032.472693] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3e83dcc3-d9a0-4b33-9317-e2b2225d659d/ts-2024-12-01-04-39-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3032.472838] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3e83dcc3-d9a0-4b33-9317-e2b2225d659d is no longer used by this node. Pending deletion! [ 3032.473009] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3e83dcc3-d9a0-4b33-9317-e2b2225d659d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3032.473227] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d48901a3-c585-4a20-8855-bb9e62839be1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3032.473349] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d48901a3-c585-4a20-8855-bb9e62839be1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3032.473649] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d48901a3-c585-4a20-8855-bb9e62839be1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3032.473890] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbdc852d-b603-47ba-83ba-109f4d357836 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3032.477787] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3032.477787] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cc0ca0-7895-e4e3-e9b8-3fd292974998" [ 3032.477787] env[61663]: _type = "Task" [ 3032.477787] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3032.485108] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cc0ca0-7895-e4e3-e9b8-3fd292974998, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3032.988916] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cc0ca0-7895-e4e3-e9b8-3fd292974998, 'name': SearchDatastore_Task, 'duration_secs': 0.0074} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3032.989259] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d48901a3-c585-4a20-8855-bb9e62839be1/ts-2024-12-01-04-39-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3032.989563] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7926de77-6b19-4abd-9e10-adc9dc468bea {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3033.002009] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d48901a3-c585-4a20-8855-bb9e62839be1/ts-2024-12-01-04-39-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3033.002009] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d48901a3-c585-4a20-8855-bb9e62839be1 is no longer used by this node. Pending deletion! [ 3033.002153] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d48901a3-c585-4a20-8855-bb9e62839be1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3033.002412] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2d81c912-9b1b-4486-bcde-c9523ec98296" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3033.002469] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2d81c912-9b1b-4486-bcde-c9523ec98296" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3033.002864] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2d81c912-9b1b-4486-bcde-c9523ec98296" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3033.003043] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64fded75-f204-4114-8552-bb08379e15fd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3033.007689] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3033.007689] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52022b6a-c30d-f012-2a6b-e974476c40f3" [ 3033.007689] env[61663]: _type = "Task" [ 3033.007689] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3033.015553] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52022b6a-c30d-f012-2a6b-e974476c40f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3033.518817] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52022b6a-c30d-f012-2a6b-e974476c40f3, 'name': SearchDatastore_Task, 'duration_secs': 0.008027} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3033.519199] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/2d81c912-9b1b-4486-bcde-c9523ec98296/ts-2024-12-01-04-39-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3033.519399] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9c008dc-f67a-4680-a168-4103d1e8e0d2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3033.531367] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/2d81c912-9b1b-4486-bcde-c9523ec98296/ts-2024-12-01-04-39-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3033.531526] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 2d81c912-9b1b-4486-bcde-c9523ec98296 is no longer used by this node. Pending deletion! 
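Each cycle above is bracketed by the oslo.concurrency "Acquiring lock" / "Acquired lock" / "Acquired external semaphore" / "Releasing lock" quartet. A minimal sketch of that pattern, assuming oslo.concurrency is available; the wrapper function is hypothetical, while the lock name mirrors the "[datastore2] devstack-image-cache_base/<image-id>" names in the log.

```python
from oslo_concurrency import lockutils

def with_image_lock(image_id, fn):
    # external=True layers a file-based lock over the in-process one; the
    # former appears in the log as "Acquired external semaphore", the latter
    # as the Acquiring/Acquired/Releasing trio around it.
    name = "[datastore2] devstack-image-cache_base/%s" % image_id
    with lockutils.lock(name, external=True):
        return fn()
```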
[ 3033.531692] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2d81c912-9b1b-4486-bcde-c9523ec98296" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3033.531907] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8625210c-4da9-470c-bf93-a502cc057519" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3033.532032] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8625210c-4da9-470c-bf93-a502cc057519" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3033.532364] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8625210c-4da9-470c-bf93-a502cc057519" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3033.532621] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c98f1fb-bbd6-4c4f-928c-9909e67d3341 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3033.536723] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3033.536723] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b43309-c81e-6e9c-c6f6-09077eb40913" [ 3033.536723] env[61663]: _type = "Task" [ 3033.536723] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3033.544504] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b43309-c81e-6e9c-c6f6-09077eb40913, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3034.046643] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b43309-c81e-6e9c-c6f6-09077eb40913, 'name': SearchDatastore_Task, 'duration_secs': 0.008873} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3034.046956] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/8625210c-4da9-470c-bf93-a502cc057519 is no longer used. Deleting! 
[ 3034.047118] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/8625210c-4da9-470c-bf93-a502cc057519 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3034.047390] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87f49906-4bf2-4d63-b51b-dd4388d89d60 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3034.053468] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3034.053468] env[61663]: value = "task-1690943" [ 3034.053468] env[61663]: _type = "Task" [ 3034.053468] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3034.060665] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690943, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3034.563183] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690943, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111924} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3034.563565] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3034.563565] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8625210c-4da9-470c-bf93-a502cc057519" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3034.563733] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3061a49c-3929-4619-9b54-7581514e3f71" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3034.563853] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3061a49c-3929-4619-9b54-7581514e3f71" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3034.564214] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3061a49c-3929-4619-9b54-7581514e3f71" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3034.564477] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ec047b2-52cb-4719-b13b-865bc97cccee {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3034.568672] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3034.568672] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a5ea9f-613e-ec19-e0c9-be8cf0f58749" [ 3034.568672] env[61663]: _type = "Task" [ 3034.568672] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3034.576126] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a5ea9f-613e-ec19-e0c9-be8cf0f58749, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3035.079937] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a5ea9f-613e-ec19-e0c9-be8cf0f58749, 'name': SearchDatastore_Task, 'duration_secs': 0.007956} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3035.080219] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3061a49c-3929-4619-9b54-7581514e3f71/ts-2024-12-01-04-39-20 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3035.080480] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53de46a3-79d4-4163-a1cc-76ece85a8033 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3035.092183] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3061a49c-3929-4619-9b54-7581514e3f71/ts-2024-12-01-04-39-20 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3035.092337] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3061a49c-3929-4619-9b54-7581514e3f71 is no longer used by this node. Pending deletion! 
[ 3035.092499] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3061a49c-3929-4619-9b54-7581514e3f71" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3035.092713] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/131065ec-a9dc-4193-afa7-87f5d4eab4b9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3035.092833] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/131065ec-a9dc-4193-afa7-87f5d4eab4b9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3035.093162] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/131065ec-a9dc-4193-afa7-87f5d4eab4b9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3035.093401] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c238aea4-c5c4-4683-9ac7-5b70325e03ac {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3035.097369] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3035.097369] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528b8eeb-6221-bb6a-84b0-c5cc4ff11298" [ 3035.097369] env[61663]: _type = "Task" [ 3035.097369] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3035.105480] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528b8eeb-6221-bb6a-84b0-c5cc4ff11298, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3035.607366] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528b8eeb-6221-bb6a-84b0-c5cc4ff11298, 'name': SearchDatastore_Task, 'duration_secs': 0.008069} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3035.607708] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/131065ec-a9dc-4193-afa7-87f5d4eab4b9/ts-2024-12-01-04-39-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3035.607887] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2c004d5-52a3-4e78-8d08-dbeadd555ff5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3035.619468] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/131065ec-a9dc-4193-afa7-87f5d4eab4b9/ts-2024-12-01-04-39-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3035.619653] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 131065ec-a9dc-4193-afa7-87f5d4eab4b9 is no longer used by this node. Pending deletion! [ 3035.619761] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/131065ec-a9dc-4193-afa7-87f5d4eab4b9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3035.619976] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/a2080b24-48fe-42b9-9fda-4329fe9a97dd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3035.620106] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/a2080b24-48fe-42b9-9fda-4329fe9a97dd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3035.620420] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a2080b24-48fe-42b9-9fda-4329fe9a97dd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3035.620677] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e11f4fe8-02ee-4c2d-b19a-97edb6b98d55 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3035.624660] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3035.624660] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525ea2c5-80ee-eced-2476-c7f194a427f2" [ 3035.624660] env[61663]: _type = "Task" [ 3035.624660] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3035.631766] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525ea2c5-80ee-eced-2476-c7f194a427f2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3036.134628] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525ea2c5-80ee-eced-2476-c7f194a427f2, 'name': SearchDatastore_Task, 'duration_secs': 0.007672} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3036.134990] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/a2080b24-48fe-42b9-9fda-4329fe9a97dd/ts-2024-12-01-04-39-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3036.135336] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78d7081e-87a8-4d61-b7d0-031af778262d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.147018] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/a2080b24-48fe-42b9-9fda-4329fe9a97dd/ts-2024-12-01-04-39-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3036.147175] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image a2080b24-48fe-42b9-9fda-4329fe9a97dd is no longer used by this node. Pending deletion! [ 3036.147343] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/a2080b24-48fe-42b9-9fda-4329fe9a97dd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3036.147558] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/26b981b2-e23f-455d-a1e7-c4c0d32d7878" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3036.147679] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/26b981b2-e23f-455d-a1e7-c4c0d32d7878" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3036.147984] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/26b981b2-e23f-455d-a1e7-c4c0d32d7878" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3036.148276] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd23aa45-e95a-47e1-bdbc-0392490c10ff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.152671] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3036.152671] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529bc854-67cb-8883-33c3-59fbb58f882a" [ 3036.152671] env[61663]: _type = "Task" [ 3036.152671] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3036.159988] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529bc854-67cb-8883-33c3-59fbb58f882a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3036.663893] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529bc854-67cb-8883-33c3-59fbb58f882a, 'name': SearchDatastore_Task, 'duration_secs': 0.008364} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3036.664198] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/26b981b2-e23f-455d-a1e7-c4c0d32d7878/ts-2024-12-01-04-39-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3036.664405] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-842d3ec7-85a6-498c-837e-0cada89a6afb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.675456] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/26b981b2-e23f-455d-a1e7-c4c0d32d7878/ts-2024-12-01-04-39-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3036.675600] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 26b981b2-e23f-455d-a1e7-c4c0d32d7878 is no longer used by this node. Pending deletion! 
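The HostDatastoreBrowser.SearchDatastore_Task invocations above check what is inside each image's cache folder before deciding how to age it. A minimal sketch of issuing that call through an oslo.vmware session, assuming an established session and a ds_browser managed-object reference are already in hand (both taken as given here); the function name and the hard-coded datastore path are illustrative only.

```python
def search_image_folder(session, ds_browser, image_id):
    # Build an (empty) search spec via the SOAP client factory and start the
    # asynchronous search task, as logged by "Invoking
    # HostDatastoreBrowser.SearchDatastore_Task" above.
    spec = session.vim.client.factory.create(
        'ns0:HostDatastoreBrowserSearchSpec')
    spec.matchPattern = ["*"]
    task = session.invoke_api(
        session.vim, 'SearchDatastore_Task', ds_browser,
        datastorePath="[datastore2] devstack-image-cache_base/%s" % image_id,
        searchSpec=spec)
    # Blocks until the task above reports success, mirroring wait_for_task.
    return session.wait_for_task(task)
```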
[ 3036.675762] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/26b981b2-e23f-455d-a1e7-c4c0d32d7878" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3036.675971] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6e3252b7-9ae1-429a-82e0-51f6e2a9b30e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3036.676121] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6e3252b7-9ae1-429a-82e0-51f6e2a9b30e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3036.676442] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6e3252b7-9ae1-429a-82e0-51f6e2a9b30e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3036.676668] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eed2e00-dd36-451b-ac68-46044a18a6ff {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.680675] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3036.680675] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5262344c-4ece-b2e0-799e-65fee37d2372" [ 3036.680675] env[61663]: _type = "Task" [ 3036.680675] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3036.688282] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5262344c-4ece-b2e0-799e-65fee37d2372, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3037.193861] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5262344c-4ece-b2e0-799e-65fee37d2372, 'name': SearchDatastore_Task, 'duration_secs': 0.007939} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3037.194184] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/6e3252b7-9ae1-429a-82e0-51f6e2a9b30e is no longer used. Deleting! 
[ 3037.194334] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/6e3252b7-9ae1-429a-82e0-51f6e2a9b30e {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3037.194591] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b5ca0d7-7a0b-4c93-9767-21e631d62ff3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3037.200883] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3037.200883] env[61663]: value = "task-1690944" [ 3037.200883] env[61663]: _type = "Task" [ 3037.200883] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3037.208565] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690944, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3037.710491] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104369} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3037.710793] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3037.710914] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6e3252b7-9ae1-429a-82e0-51f6e2a9b30e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3037.711164] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e64ef77e-1e05-4e65-b1d3-7d6a4c897825" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3037.711286] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e64ef77e-1e05-4e65-b1d3-7d6a4c897825" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3037.711601] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e64ef77e-1e05-4e65-b1d3-7d6a4c897825" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3037.711904] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-962c8061-0cbf-403f-9449-bce2a13ebeac {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3037.716268] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3037.716268] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528b8dc5-b7d5-4761-efec-9a2f933bdca7" [ 3037.716268] env[61663]: _type = "Task" [ 3037.716268] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3037.723889] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528b8dc5-b7d5-4761-efec-9a2f933bdca7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3038.227172] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528b8dc5-b7d5-4761-efec-9a2f933bdca7, 'name': SearchDatastore_Task, 'duration_secs': 0.008257} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3038.227455] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e64ef77e-1e05-4e65-b1d3-7d6a4c897825/ts-2024-12-01-04-39-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3038.227719] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c970ea5-b35b-4dbe-acdd-b9b579925ff1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.239210] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e64ef77e-1e05-4e65-b1d3-7d6a4c897825/ts-2024-12-01-04-39-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3038.239362] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e64ef77e-1e05-4e65-b1d3-7d6a4c897825 is no longer used by this node. Pending deletion! 
[ 3038.239492] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e64ef77e-1e05-4e65-b1d3-7d6a4c897825" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3038.239709] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/0b07f35c-3197-4f6d-89f9-78b5793aa461" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3038.239829] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/0b07f35c-3197-4f6d-89f9-78b5793aa461" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3038.240153] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0b07f35c-3197-4f6d-89f9-78b5793aa461" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3038.240378] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51d01980-2560-49fd-be74-fd0edc4909d8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.244331] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3038.244331] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524f4150-199f-44d8-574f-32d7585b92f1" [ 3038.244331] env[61663]: _type = "Task" [ 3038.244331] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3038.251762] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524f4150-199f-44d8-574f-32d7585b92f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3038.754863] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524f4150-199f-44d8-574f-32d7585b92f1, 'name': SearchDatastore_Task, 'duration_secs': 0.00732} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3038.755162] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/0b07f35c-3197-4f6d-89f9-78b5793aa461/ts-2024-12-01-04-39-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3038.755426] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9215dcea-49f8-41ff-a0ed-336e47bc1089 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.767332] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/0b07f35c-3197-4f6d-89f9-78b5793aa461/ts-2024-12-01-04-39-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3038.767491] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 0b07f35c-3197-4f6d-89f9-78b5793aa461 is no longer used by this node. Pending deletion! [ 3038.767656] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/0b07f35c-3197-4f6d-89f9-78b5793aa461" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3038.767883] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8cbd993d-67be-4dfb-b93e-b278182c3157" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3038.768016] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8cbd993d-67be-4dfb-b93e-b278182c3157" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3038.768366] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8cbd993d-67be-4dfb-b93e-b278182c3157" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3038.768638] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47438f72-3ddf-4924-9ad7-25d282c9ea3e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.772887] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3038.772887] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5223277b-4f14-1d0f-6ea6-4c61264f9e87" [ 3038.772887] env[61663]: _type = "Task" [ 3038.772887] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3038.781297] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5223277b-4f14-1d0f-6ea6-4c61264f9e87, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3039.283520] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5223277b-4f14-1d0f-6ea6-4c61264f9e87, 'name': SearchDatastore_Task, 'duration_secs': 0.008109} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3039.283738] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8cbd993d-67be-4dfb-b93e-b278182c3157/ts-2024-12-01-04-39-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3039.283987] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a08dfabb-785e-4250-a994-291f8ac54fa5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3039.295015] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8cbd993d-67be-4dfb-b93e-b278182c3157/ts-2024-12-01-04-39-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3039.295165] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8cbd993d-67be-4dfb-b93e-b278182c3157 is no longer used by this node. Pending deletion! [ 3039.295330] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8cbd993d-67be-4dfb-b93e-b278182c3157" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3039.295545] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7802114f-fbf0-4134-96d8-abef25c53b77" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3039.295673] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7802114f-fbf0-4134-96d8-abef25c53b77" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3039.295975] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7802114f-fbf0-4134-96d8-abef25c53b77" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3039.296232] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d2351a4-4f12-4a49-908e-77035e882287 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3039.300040] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3039.300040] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521a7c9c-f335-35e8-18e6-db504916f7c7" [ 3039.300040] env[61663]: _type = "Task" [ 3039.300040] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3039.307149] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521a7c9c-f335-35e8-18e6-db504916f7c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3039.810628] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521a7c9c-f335-35e8-18e6-db504916f7c7, 'name': SearchDatastore_Task, 'duration_secs': 0.007699} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3039.810941] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/7802114f-fbf0-4134-96d8-abef25c53b77/ts-2024-12-01-04-39-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3039.811201] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13b8219b-3e3c-40f6-b59e-d9e0a25a735a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3039.824032] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/7802114f-fbf0-4134-96d8-abef25c53b77/ts-2024-12-01-04-39-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3039.824133] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 7802114f-fbf0-4134-96d8-abef25c53b77 is no longer used by this node. Pending deletion! 
[ 3039.824293] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7802114f-fbf0-4134-96d8-abef25c53b77" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3039.824513] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d445cabb-cbb5-4238-8592-5350ff93a539" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3039.824632] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d445cabb-cbb5-4238-8592-5350ff93a539" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3039.824939] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d445cabb-cbb5-4238-8592-5350ff93a539" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3039.825273] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b1a5188-9f3e-4b88-9c92-7596e00299a3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3039.829817] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3039.829817] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526dc3ee-c109-7147-2354-3e0009280b91" [ 3039.829817] env[61663]: _type = "Task" [ 3039.829817] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3039.837635] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526dc3ee-c109-7147-2354-3e0009280b91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3040.340299] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526dc3ee-c109-7147-2354-3e0009280b91, 'name': SearchDatastore_Task, 'duration_secs': 0.008496} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3040.340547] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d445cabb-cbb5-4238-8592-5350ff93a539/ts-2024-12-01-04-39-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3040.340775] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f59175b-dbff-4a39-baa5-f1355c67659e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3040.352419] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d445cabb-cbb5-4238-8592-5350ff93a539/ts-2024-12-01-04-39-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3040.352563] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d445cabb-cbb5-4238-8592-5350ff93a539 is no longer used by this node. Pending deletion! [ 3040.352717] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d445cabb-cbb5-4238-8592-5350ff93a539" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3040.352927] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/bc93113e-97ff-49a9-b151-f6d337ace761" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3040.353058] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/bc93113e-97ff-49a9-b151-f6d337ace761" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3040.353374] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/bc93113e-97ff-49a9-b151-f6d337ace761" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3040.353607] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-916ba346-acd3-4bb4-8dca-df302a28853d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3040.357418] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3040.357418] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521ba464-2cba-51dd-a90e-9f5449e1232f" [ 3040.357418] env[61663]: _type = "Task" [ 3040.357418] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3040.364615] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521ba464-2cba-51dd-a90e-9f5449e1232f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3040.867864] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521ba464-2cba-51dd-a90e-9f5449e1232f, 'name': SearchDatastore_Task, 'duration_secs': 0.007639} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3040.868263] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/bc93113e-97ff-49a9-b151-f6d337ace761/ts-2024-12-01-04-39-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3040.868446] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62495f27-71d5-4f8b-ab96-bc0d1c79b064 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3040.879447] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/bc93113e-97ff-49a9-b151-f6d337ace761/ts-2024-12-01-04-39-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3040.879621] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image bc93113e-97ff-49a9-b151-f6d337ace761 is no longer used by this node. Pending deletion! [ 3040.879758] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/bc93113e-97ff-49a9-b151-f6d337ace761" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3040.879969] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/f8b26198-ad20-425d-9687-f94b89fbecf4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3040.880100] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/f8b26198-ad20-425d-9687-f94b89fbecf4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3040.880406] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f8b26198-ad20-425d-9687-f94b89fbecf4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3040.880629] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30c812bf-eac9-4fc0-a8a1-547651c36729 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3040.884704] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3040.884704] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522bc8eb-761a-282e-07ff-71a8025b18e4" [ 3040.884704] env[61663]: _type = "Task" [ 3040.884704] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3040.891683] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522bc8eb-761a-282e-07ff-71a8025b18e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3041.395810] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522bc8eb-761a-282e-07ff-71a8025b18e4, 'name': SearchDatastore_Task, 'duration_secs': 0.007471} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3041.396041] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/f8b26198-ad20-425d-9687-f94b89fbecf4/ts-2024-12-01-04-39-27 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3041.396432] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91218435-61c0-4e75-88cd-426bc8809493 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3041.407160] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/f8b26198-ad20-425d-9687-f94b89fbecf4/ts-2024-12-01-04-39-27 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3041.407320] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image f8b26198-ad20-425d-9687-f94b89fbecf4 is no longer used by this node. Pending deletion! 
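The records above repeat a fixed aging pattern: each cached image that is no longer referenced gets a marker directory named ts-YYYY-MM-DD-HH-MM-SS (e.g. ts-2024-12-01-04-39-27) created next to it, and the actual delete happens only on a later pass once that marker has aged. A minimal sketch of how such a marker name can be produced and read back; the helper names are illustrative, not Nova's actual API:

    from datetime import datetime, timezone

    TS_FORMAT = "ts-%Y-%m-%d-%H-%M-%S"

    def ts_dir_name(now=None):
        # Build the marker directory name seen in the log,
        # e.g. ts-2024-12-01-04-39-26.
        now = now or datetime.now(timezone.utc)
        return now.strftime(TS_FORMAT)

    def ts_dir_age(name, now=None):
        # Recover the marker's age so a later cleanup pass can decide
        # whether the entry is old enough to delete.
        now = now or datetime.now(timezone.utc)
        marked = datetime.strptime(name, TS_FORMAT).replace(tzinfo=timezone.utc)
        return now - marked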
[ 3041.407464] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/f8b26198-ad20-425d-9687-f94b89fbecf4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3041.407677] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/76ba640e-8a40-4006-9952-b40853d0a469" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3041.407795] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/76ba640e-8a40-4006-9952-b40853d0a469" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3041.408119] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/76ba640e-8a40-4006-9952-b40853d0a469" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3041.408374] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cf18b8d-c227-43d7-a1d9-e0eab6dfe4bc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3041.412273] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3041.412273] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e4cd43-161c-a775-2de3-680c96cff80b" [ 3041.412273] env[61663]: _type = "Task" [ 3041.412273] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3041.423447] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e4cd43-161c-a775-2de3-680c96cff80b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3041.922779] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e4cd43-161c-a775-2de3-680c96cff80b, 'name': SearchDatastore_Task, 'duration_secs': 0.007385} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3041.923252] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/76ba640e-8a40-4006-9952-b40853d0a469/ts-2024-12-01-04-39-27 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3041.923324] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49bb572a-885c-420f-9ce0-f1060de058f0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3041.934794] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/76ba640e-8a40-4006-9952-b40853d0a469/ts-2024-12-01-04-39-27 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3041.934972] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 76ba640e-8a40-4006-9952-b40853d0a469 is no longer used by this node. Pending deletion! [ 3041.935145] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/76ba640e-8a40-4006-9952-b40853d0a469" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3041.935359] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/cb2d0a13-f90c-4a3f-bbbb-76f455014d5f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3041.935481] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/cb2d0a13-f90c-4a3f-bbbb-76f455014d5f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3041.935786] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/cb2d0a13-f90c-4a3f-bbbb-76f455014d5f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3041.936016] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad5c8d30-abe4-4c4a-97aa-0163602db228 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3041.940100] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3041.940100] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5222bab2-5044-07ff-db6b-0f9e41b039ff" [ 3041.940100] env[61663]: _type = "Task" [ 3041.940100] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3041.947564] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5222bab2-5044-07ff-db6b-0f9e41b039ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3042.451272] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5222bab2-5044-07ff-db6b-0f9e41b039ff, 'name': SearchDatastore_Task, 'duration_secs': 0.007659} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3042.451505] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/cb2d0a13-f90c-4a3f-bbbb-76f455014d5f/ts-2024-12-01-04-39-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3042.451767] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8fa1745e-0544-4d40-8a1f-7dfa1a9201e7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3042.464259] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/cb2d0a13-f90c-4a3f-bbbb-76f455014d5f/ts-2024-12-01-04-39-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3042.464412] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image cb2d0a13-f90c-4a3f-bbbb-76f455014d5f is no longer used by this node. Pending deletion! [ 3042.464556] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/cb2d0a13-f90c-4a3f-bbbb-76f455014d5f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3042.465837] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3e9eff73-26e7-490c-91fe-f3d6b0bebcb6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3042.465837] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3e9eff73-26e7-490c-91fe-f3d6b0bebcb6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3042.465837] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3e9eff73-26e7-490c-91fe-f3d6b0bebcb6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3042.465837] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7403ecd1-18d9-4303-b215-710c97c4b200 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3042.471016] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3042.471016] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5296d907-83b3-da4c-10bf-d34a74d01af4" [ 3042.471016] env[61663]: _type = "Task" [ 3042.471016] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3042.477670] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5296d907-83b3-da4c-10bf-d34a74d01af4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3042.980763] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5296d907-83b3-da4c-10bf-d34a74d01af4, 'name': SearchDatastore_Task, 'duration_secs': 0.007991} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3042.981172] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3e9eff73-26e7-490c-91fe-f3d6b0bebcb6/ts-2024-12-01-04-39-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3042.981298] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22e456bc-c63d-4eff-9155-9bcdec8a8fb3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3042.992809] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3e9eff73-26e7-490c-91fe-f3d6b0bebcb6/ts-2024-12-01-04-39-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3042.992967] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3e9eff73-26e7-490c-91fe-f3d6b0bebcb6 is no longer used by this node. Pending deletion! 
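Each SearchDatastore_Task above is driven by the same wait_for_task/_poll_task pair: invoke the vCenter task, then poll its state on an interval until it reports success or error (hence the "progress is 0%" polls followed by "completed successfully" with a duration_secs). A rough, self-contained approximation of that polling loop; the real oslo.vmware implementation is callback-based and more elaborate, and the dict shape below is an assumption standing in for the VMware TaskInfo object:

    import time

    def wait_for_task(poll_task, interval=0.5, timeout=60.0):
        # Poll `poll_task` until the task completes or the timeout expires.
        # `poll_task` is assumed to return a dict such as
        # {'state': 'running' | 'success' | 'error', 'progress': 0}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_task()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError('task failed: %r' % info.get('error'))
            time.sleep(interval)
        raise TimeoutError('task still running after %.1fs' % timeout)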
[ 3042.993118] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3e9eff73-26e7-490c-91fe-f3d6b0bebcb6" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3042.993336] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d6cbb2cd-5592-4e23-affa-4ef78490bcc5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3042.993456] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d6cbb2cd-5592-4e23-affa-4ef78490bcc5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3042.993776] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d6cbb2cd-5592-4e23-affa-4ef78490bcc5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3042.994011] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78c4555d-b430-4612-b3b8-c162fcc74948 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3042.998200] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3042.998200] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526607fc-6b38-dc79-86de-ea3372e00ea1" [ 3042.998200] env[61663]: _type = "Task" [ 3042.998200] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3043.005444] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526607fc-6b38-dc79-86de-ea3372e00ea1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3043.508929] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526607fc-6b38-dc79-86de-ea3372e00ea1, 'name': SearchDatastore_Task, 'duration_secs': 0.007711} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3043.509218] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d6cbb2cd-5592-4e23-affa-4ef78490bcc5/ts-2024-12-01-04-39-29 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3043.509500] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a0f5925-7189-497f-9eec-c5f2143e4f62 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3043.521013] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d6cbb2cd-5592-4e23-affa-4ef78490bcc5/ts-2024-12-01-04-39-29 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3043.521171] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d6cbb2cd-5592-4e23-affa-4ef78490bcc5 is no longer used by this node. Pending deletion! [ 3043.521367] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d6cbb2cd-5592-4e23-affa-4ef78490bcc5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3043.522032] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2760fb91-83fa-4a6d-8dd2-aae0ca7e17d9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3043.522032] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2760fb91-83fa-4a6d-8dd2-aae0ca7e17d9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3043.522032] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2760fb91-83fa-4a6d-8dd2-aae0ca7e17d9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3043.522276] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc7fde6e-bc06-450b-a153-b1d7d831c226 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3043.526408] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3043.526408] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5248dcb3-5a7b-3b83-2482-062626716bce" [ 3043.526408] env[61663]: _type = "Task" [ 3043.526408] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3043.533881] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5248dcb3-5a7b-3b83-2482-062626716bce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3044.036425] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5248dcb3-5a7b-3b83-2482-062626716bce, 'name': SearchDatastore_Task, 'duration_secs': 0.008775} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3044.036791] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/2760fb91-83fa-4a6d-8dd2-aae0ca7e17d9 is no longer used. Deleting! [ 3044.036910] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/2760fb91-83fa-4a6d-8dd2-aae0ca7e17d9 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3044.037224] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6630084-d4df-4073-9866-953a446590ad {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.043316] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3044.043316] env[61663]: value = "task-1690945" [ 3044.043316] env[61663]: _type = "Task" [ 3044.043316] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3044.050400] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690945, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3044.553209] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100736} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3044.553426] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3044.553606] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2760fb91-83fa-4a6d-8dd2-aae0ca7e17d9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3044.553908] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3044.554062] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3044.554383] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3044.554650] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6fd7e32-db91-44e5-b2cb-0637fbb4b5ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.558927] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3044.558927] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52adf7f2-d63a-720a-95e9-b159a04eef27" [ 3044.558927] env[61663]: _type = "Task" [ 3044.558927] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3044.566535] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52adf7f2-d63a-720a-95e9-b159a04eef27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3045.070102] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52adf7f2-d63a-720a-95e9-b159a04eef27, 'name': SearchDatastore_Task, 'duration_secs': 0.009089} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3045.070496] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae/ts-2024-12-01-04-39-30 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3045.070591] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51bca432-cf88-4826-a3cc-88dbb3379573 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.081682] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae/ts-2024-12-01-04-39-30 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3045.081828] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 43d1614c-dda2-4b11-bf6c-a2c92ca115ae is no longer used by this node. Pending deletion! [ 3045.081983] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/43d1614c-dda2-4b11-bf6c-a2c92ca115ae" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3045.082214] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/1fc536ee-32c0-4a46-ab16-0620d8b91dcf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3045.082334] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/1fc536ee-32c0-4a46-ab16-0620d8b91dcf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3045.082656] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1fc536ee-32c0-4a46-ab16-0620d8b91dcf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3045.082890] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71cb5b74-6b0c-4e9c-94a8-9c66fc936778 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.086951] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3045.086951] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a1197e-d482-7185-f0d4-67c9642c0076" [ 3045.086951] env[61663]: _type = "Task" [ 3045.086951] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3045.094236] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a1197e-d482-7185-f0d4-67c9642c0076, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3045.600559] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a1197e-d482-7185-f0d4-67c9642c0076, 'name': SearchDatastore_Task, 'duration_secs': 0.007558} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3045.600559] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/1fc536ee-32c0-4a46-ab16-0620d8b91dcf/ts-2024-12-01-04-39-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3045.600559] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03b369fe-cbdf-4628-801f-e2a1da81c5f8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.613018] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/1fc536ee-32c0-4a46-ab16-0620d8b91dcf/ts-2024-12-01-04-39-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3045.613018] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 1fc536ee-32c0-4a46-ab16-0620d8b91dcf is no longer used by this node. Pending deletion! [ 3045.613018] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/1fc536ee-32c0-4a46-ab16-0620d8b91dcf" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3045.613018] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8316430e-e19a-4cac-b69d-77293c36344b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3045.613018] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8316430e-e19a-4cac-b69d-77293c36344b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3045.613018] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8316430e-e19a-4cac-b69d-77293c36344b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3045.613018] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa6227a0-8220-4570-a90e-a19e88e046ac {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.616556] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3045.616556] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523d16e4-8084-5efa-449c-f78695dab678" [ 3045.616556] env[61663]: _type = "Task" [ 3045.616556] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3045.624262] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523d16e4-8084-5efa-449c-f78695dab678, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3046.130080] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523d16e4-8084-5efa-449c-f78695dab678, 'name': SearchDatastore_Task, 'duration_secs': 0.007572} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3046.130080] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8316430e-e19a-4cac-b69d-77293c36344b/ts-2024-12-01-04-39-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3046.130080] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-766f7960-0876-4a20-8bec-58846f1876b3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3046.142023] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8316430e-e19a-4cac-b69d-77293c36344b/ts-2024-12-01-04-39-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3046.142023] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8316430e-e19a-4cac-b69d-77293c36344b is no longer used by this node. Pending deletion! 
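Every cache entry is handled inside the same serialization visible above: Acquiring lock, Acquired lock, Acquired external semaphore, then Releasing lock once the marker directory exists. A hedged sketch of that per-entry critical section using oslo.concurrency's lockutils, where process_entry is a hypothetical callable and the lock name mirrors the "[datastore2] devstack-image-cache_base/<image-id>" strings in the log; mapping external=True onto the "external semaphore" line is an assumption about how these records line up:

    from oslo_concurrency import lockutils

    def with_cache_entry_lock(datastore, image_id, process_entry):
        # Serialize work on one image-cache entry across workers, as the
        # Acquiring/Acquired/Releasing lines above do. external=True adds a
        # file-based lock on top of the in-process semaphore.
        lock_name = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
        with lockutils.lock(lock_name, external=True):
            return process_entry()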
[ 3046.142023] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8316430e-e19a-4cac-b69d-77293c36344b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3046.142023] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3046.142023] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3046.142023] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3046.142023] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e15c367-66db-4254-b47d-13e93c1eb4ef {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3046.146442] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3046.146442] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c7ede-6cb6-d715-8b1e-f37bf9e512a6" [ 3046.146442] env[61663]: _type = "Task" [ 3046.146442] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3046.154105] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c7ede-6cb6-d715-8b1e-f37bf9e512a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3046.657818] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c7ede-6cb6-d715-8b1e-f37bf9e512a6, 'name': SearchDatastore_Task, 'duration_secs': 0.007781} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3046.658107] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600/ts-2024-12-01-04-39-32 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3046.658289] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44441284-a5fa-4dab-b9d1-867275c33864 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3046.669671] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600/ts-2024-12-01-04-39-32 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3046.669855] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 5cb6d98c-59c9-4096-9992-655aba572600 is no longer used by this node. Pending deletion! [ 3046.669972] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/5cb6d98c-59c9-4096-9992-655aba572600" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3046.670202] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3046.670321] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3046.670617] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3046.670862] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bd184ff-916b-4ec6-ab33-f014ee9a11b0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3046.674748] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3046.674748] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f3fd49-ba25-83d4-fc36-5884d34463fe" [ 3046.674748] env[61663]: _type = "Task" [ 3046.674748] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3046.681977] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f3fd49-ba25-83d4-fc36-5884d34463fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3047.185966] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f3fd49-ba25-83d4-fc36-5884d34463fe, 'name': SearchDatastore_Task, 'duration_secs': 0.007215} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3047.186329] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663/ts-2024-12-01-04-39-33 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3047.186554] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd78d638-a4ec-4bd8-87ff-d8b458c5cf69 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3047.198445] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663/ts-2024-12-01-04-39-33 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3047.198590] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8bcecce4-adda-439a-ac6b-313386b02663 is no longer used by this node. Pending deletion! [ 3047.198732] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3047.198946] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/eef043d0-0b3d-4c15-ab5a-bfc7ee16aa32" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3047.199079] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/eef043d0-0b3d-4c15-ab5a-bfc7ee16aa32" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3047.199401] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/eef043d0-0b3d-4c15-ab5a-bfc7ee16aa32" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3047.199624] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09a5f469-53bd-412e-9458-75cf0188e0ee {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3047.203569] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3047.203569] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c9a271-2d97-7e53-d4cd-89656d85b1de" [ 3047.203569] env[61663]: _type = "Task" [ 3047.203569] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3047.210994] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c9a271-2d97-7e53-d4cd-89656d85b1de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3047.714597] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c9a271-2d97-7e53-d4cd-89656d85b1de, 'name': SearchDatastore_Task, 'duration_secs': 0.00786} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3047.714901] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/eef043d0-0b3d-4c15-ab5a-bfc7ee16aa32/ts-2024-12-01-04-39-33 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3047.715154] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-270ebbf3-40aa-4a2c-bcbf-c92d75c14d02 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3047.726265] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/eef043d0-0b3d-4c15-ab5a-bfc7ee16aa32/ts-2024-12-01-04-39-33 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3047.726419] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image eef043d0-0b3d-4c15-ab5a-bfc7ee16aa32 is no longer used by this node. Pending deletion! 
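Two outcomes appear in this cleanup pass: most entries are only marked ("Pending deletion!"), while entries whose marker has already aged past the threshold are removed outright via DeleteDatastoreFile_Task (as with 2760fb91-... earlier and c40ba821-... just below). A small sketch of that decision, assuming marker_time is the timestamp recovered from an existing ts- directory (None when no marker exists yet); the one-day default is illustrative only, since the real minimum age is an operator-configurable setting:

    from datetime import datetime, timedelta, timezone

    def cache_entry_action(marker_time, now=None,
                           min_age=timedelta(days=1)):
        # Returns 'mark', 'wait', or 'delete' for one unused cache entry.
        now = now or datetime.now(timezone.utc)
        if marker_time is None:
            return 'mark'      # first pass: write the ts- marker directory
        if now - marker_time >= min_age:
            return 'delete'    # old enough: issue DeleteDatastoreFile_Task
        return 'wait'          # marked, but not old enough to delete yet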
[ 3047.726560] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/eef043d0-0b3d-4c15-ab5a-bfc7ee16aa32" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3047.726772] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/c40ba821-6734-4cd5-bef1-c194c2a451d1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3047.726891] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/c40ba821-6734-4cd5-bef1-c194c2a451d1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3047.727221] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c40ba821-6734-4cd5-bef1-c194c2a451d1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3047.727450] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aadc165-df78-44ce-bcb9-326fd7177211 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3047.731481] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3047.731481] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dfcd3a-5250-21ed-4f87-50422ae19e99" [ 3047.731481] env[61663]: _type = "Task" [ 3047.731481] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3047.738629] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dfcd3a-5250-21ed-4f87-50422ae19e99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3048.242427] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dfcd3a-5250-21ed-4f87-50422ae19e99, 'name': SearchDatastore_Task, 'duration_secs': 0.008092} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3048.242734] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/c40ba821-6734-4cd5-bef1-c194c2a451d1 is no longer used. Deleting! 
[ 3048.242873] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/c40ba821-6734-4cd5-bef1-c194c2a451d1 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3048.243184] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccbd10d9-b7ea-44dd-a6cf-e6a43fbcbcd8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3048.250270] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3048.250270] env[61663]: value = "task-1690946" [ 3048.250270] env[61663]: _type = "Task" [ 3048.250270] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3048.257886] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3048.737194] env[61663]: WARNING oslo_vmware.rw_handles [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3048.737194] env[61663]: ERROR oslo_vmware.rw_handles [ 3048.737868] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/a0f63704-ac9c-46bf-a203-ed955d967355/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3048.739883] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] 
Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3048.740145] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Copying Virtual Disk [datastore1] vmware_temp/a0f63704-ac9c-46bf-a203-ed955d967355/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/a0f63704-ac9c-46bf-a203-ed955d967355/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3048.740434] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f541ad2c-02a6-4023-be8b-49eaeb3e9ac5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3048.747913] env[61663]: DEBUG oslo_vmware.api [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Waiting for the task: (returnval){ [ 3048.747913] env[61663]: value = "task-1690947" [ 3048.747913] env[61663]: _type = "Task" [ 3048.747913] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3048.758515] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105292} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3048.761352] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3048.761533] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/c40ba821-6734-4cd5-bef1-c194c2a451d1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3048.761751] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/af63da73-029b-4f60-a4fd-5a86c59d21b3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3048.761899] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/af63da73-029b-4f60-a4fd-5a86c59d21b3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3048.762189] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/af63da73-029b-4f60-a4fd-5a86c59d21b3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3048.762441] env[61663]: DEBUG oslo_vmware.api [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Task: {'id': task-1690947, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3048.762638] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e002a755-1a6f-4509-aecb-d543a16c6d8f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3048.766537] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3048.766537] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523722e1-6271-87eb-0a59-9698ffc6bb40" [ 3048.766537] env[61663]: _type = "Task" [ 3048.766537] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3048.773935] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523722e1-6271-87eb-0a59-9698ffc6bb40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3049.258692] env[61663]: DEBUG oslo_vmware.exceptions [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Fault InvalidArgument not matched. {{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3049.259152] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3049.259642] env[61663]: ERROR nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3049.259642] env[61663]: Faults: ['InvalidArgument'] [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Traceback (most recent call last): [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] yield resources [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] self.driver.spawn(context, instance, image_meta, [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3049.259642] env[61663]: 
ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] self._fetch_image_if_missing(context, vi) [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] image_cache(vi, tmp_image_ds_loc) [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] vm_util.copy_virtual_disk( [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] session._wait_for_task(vmdk_copy_task) [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] return self.wait_for_task(task_ref) [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] return evt.wait() [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] result = hub.switch() [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] return self.greenlet.switch() [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] self.f(*self.args, **self.kw) [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] raise exceptions.translate_fault(task_info.error) [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 
8fc36ed9-9315-4bdb-b4f3-248106a3c681] Faults: ['InvalidArgument'] [ 3049.259642] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] [ 3049.260488] env[61663]: INFO nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Terminating instance [ 3049.261816] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3049.262044] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3049.262283] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4a97115e-4dc4-48ea-8aa0-67796abe39d6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.265657] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 3049.265915] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3049.266626] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffc26fb-9a4f-487f-a05b-7246f0109a0b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.278641] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523722e1-6271-87eb-0a59-9698ffc6bb40, 'name': SearchDatastore_Task, 'duration_secs': 0.016779} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3049.281366] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/af63da73-029b-4f60-a4fd-5a86c59d21b3 is no longer used. Deleting! 
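The exchange above shows the oslo.vmware task protocol end to end: a FileManager or VirtualDiskManager call returns a Task moref, wait_for_task polls it ("progress is 0%."), and a task that finishes in an error state surfaces as a translated fault, here the VimFaultException "A specified parameter was not correct: fileType" raised out of _poll_task. A minimal, self-contained sketch of that polling pattern follows; TaskInfo, get_task_info, and TaskFaultError are illustrative stand-ins, not the actual oslo.vmware internals.

# Sketch of the wait_for_task -> _poll_task pattern seen in the log above.
# TaskInfo mimics the shape of VMware's TaskInfo object; it is an assumption.
import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    state: str                  # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    error: Optional[str] = None

class TaskFaultError(Exception):
    """Stands in for the translated fault (cf. VimFaultException)."""

def wait_for_task(get_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5) -> TaskInfo:
    # Poll until the task leaves 'queued'/'running', logging progress the
    # way the log shows "Task: {...} progress is 0%."
    while True:
        info = get_task_info()
        if info.state in ('queued', 'running'):
            print("progress is %d%%" % info.progress)
            time.sleep(poll_interval)
        elif info.state == 'success':
            return info   # the log's "completed successfully"
        else:
            # Mirrors "raise exceptions.translate_fault(task_info.error)"
            # from the traceback above.
            raise TaskFaultError(info.error)

In the real library the poll runs under a looping call and the fault object is mapped through exceptions.translate_fault, as the traceback shows; the sketch keeps only the state machine that the "Waiting for the task" and "completed successfully" records trace.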
[ 3049.281476] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/af63da73-029b-4f60-a4fd-5a86c59d21b3 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3049.281665] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3049.281828] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3049.282531] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3049.282735] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-436ea020-e586-4c9d-9499-09d332589321 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.284282] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df62710f-d9e7-4d23-9db9-dca2db5c4e20 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.286231] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa623ed1-0a66-472c-ba97-c30d0de8832f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.291329] env[61663]: DEBUG oslo_vmware.api [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 3049.291329] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f29c57-6d8e-1ac2-ca29-21f8f93b0ba2" [ 3049.291329] env[61663]: _type = "Task" [ 3049.291329] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3049.295204] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3049.295204] env[61663]: value = "task-1690948" [ 3049.295204] env[61663]: _type = "Task" [ 3049.295204] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3049.300850] env[61663]: DEBUG oslo_vmware.api [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f29c57-6d8e-1ac2-ca29-21f8f93b0ba2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3049.305386] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690948, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3049.379193] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3049.379488] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3049.379723] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Deleting the datastore file [datastore1] 8fc36ed9-9315-4bdb-b4f3-248106a3c681 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3049.380065] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3abae0bc-ded3-49bc-a53d-ee30181fb9f1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.387197] env[61663]: DEBUG oslo_vmware.api [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Waiting for the task: (returnval){ [ 3049.387197] env[61663]: value = "task-1690950" [ 3049.387197] env[61663]: _type = "Task" [ 3049.387197] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3049.395892] env[61663]: DEBUG oslo_vmware.api [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Task: {'id': task-1690950, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3049.804499] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120126} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3049.807763] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3049.808059] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/af63da73-029b-4f60-a4fd-5a86c59d21b3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3049.808059] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/ed66fcd7-0649-4263-aa2c-0262aaf5e46e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3049.808198] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/ed66fcd7-0649-4263-aa2c-0262aaf5e46e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3049.808555] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/ed66fcd7-0649-4263-aa2c-0262aaf5e46e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3049.808875] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3acd7ed-4ce9-4b8a-9a16-3dd48dbb29ca {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.810579] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3049.810807] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating directory with path [datastore1] vmware_temp/5cacf96e-97c6-417c-be98-99df0fc06cde/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3049.811045] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbddf3ae-eb8d-46a7-82d3-ffa4bfbc2007 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.817842] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3049.817842] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529422bc-fa37-b4f8-39a8-d0a6f50261c1" [ 3049.817842] env[61663]: _type = "Task" [ 3049.817842] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3049.823385] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Created directory with path [datastore1] vmware_temp/5cacf96e-97c6-417c-be98-99df0fc06cde/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3049.823587] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Fetch image to [datastore1] vmware_temp/5cacf96e-97c6-417c-be98-99df0fc06cde/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3049.823763] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/5cacf96e-97c6-417c-be98-99df0fc06cde/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3049.824465] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa9a70a-7c6b-4793-8ea8-e75febba3ecf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.829626] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529422bc-fa37-b4f8-39a8-d0a6f50261c1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3049.833779] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dca77e6-3f42-4276-bac7-01a3ece615b3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.842795] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4465a95-118c-4d62-aa50-76a5fd59a1df {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.874545] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6b053e-a3f1-42c6-90f1-dad3d193eaae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.880899] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d526f437-8abd-408f-b1ae-76f3a3ff3cbb {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.895435] env[61663]: DEBUG oslo_vmware.api [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Task: {'id': task-1690950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073293} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3049.895683] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3049.895869] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3049.896060] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3049.896265] env[61663]: INFO nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Took 0.63 seconds to destroy the instance on the hypervisor. 
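The records above trace the hypervisor-side teardown in two separate steps: VirtualMachine.UnregisterVM removes the VM from the vCenter inventory ("Unregistering the VM" / "Unregistered the VM"), and only then does a FileManager.DeleteDatastoreFile_Task remove the instance directory, polled like any other task ("Deleted contents of the VM from datastore"). A hedged sketch of that sequence; unregister_vm, delete_datastore_file, and wait_for_task are assumed helper callables, not nova's actual vmops/ds_util API.

# Sketch of the destroy sequence from the log, under the assumptions above.
def destroy_instance(vm_ref, datastore_dir,
                     unregister_vm, delete_datastore_file, wait_for_task):
    # 1. Remove the VM from the vCenter inventory. Unregistering alone
    #    never touches the files on the datastore.
    unregister_vm(vm_ref)

    # 2. Delete the instance directory ("Deleting the datastore file
    #    [datastore1] 8fc36ed9-...") and block on the returned task, just
    #    as the DeleteDatastoreFile_Task / 'completed successfully' pair
    #    shows above.
    task = delete_datastore_file(datastore_dir)
    wait_for_task(task)

The two steps stay separate because UnregisterVM never deletes files: if the delete task fails, the directory lingers on the datastore and has to be cleaned up by a later pass, which is why the manager logs the unregister, the file delete, and "Instance destroyed" as distinct events.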
[ 3049.898521] env[61663]: DEBUG nova.compute.claims [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3049.898709] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3049.898983] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3049.904037] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3049.954879] env[61663]: DEBUG oslo_vmware.rw_handles [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5cacf96e-97c6-417c-be98-99df0fc06cde/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3050.017142] env[61663]: DEBUG oslo_vmware.rw_handles [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Completed reading data from the image iterator. {{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3050.018061] env[61663]: DEBUG oslo_vmware.rw_handles [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5cacf96e-97c6-417c-be98-99df0fc06cde/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3050.109129] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e80825e-f173-4ff8-b60a-2f34a564dc0f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.116769] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3e105a-5982-4eac-87a0-f5f0def150c8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.151180] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb77b2e8-6628-4fde-9178-a997b92611b0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.158055] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234ef616-406b-4499-8f42-ba404acd8fdf {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.170784] env[61663]: DEBUG nova.compute.provider_tree [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3050.179730] env[61663]: DEBUG nova.scheduler.client.report [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3050.195907] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3050.196041] env[61663]: ERROR nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3050.196041] env[61663]: Faults: ['InvalidArgument'] [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Traceback (most recent call last): [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 3050.196041] 
env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] self.driver.spawn(context, instance, image_meta, [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] self._fetch_image_if_missing(context, vi) [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] image_cache(vi, tmp_image_ds_loc) [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] vm_util.copy_virtual_disk( [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] session._wait_for_task(vmdk_copy_task) [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] return self.wait_for_task(task_ref) [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] return evt.wait() [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] result = hub.switch() [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] return self.greenlet.switch() [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] self.f(*self.args, **self.kw) [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] raise exceptions.translate_fault(task_info.error) [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Faults: ['InvalidArgument'] [ 3050.196041] env[61663]: ERROR nova.compute.manager [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] [ 3050.196918] env[61663]: DEBUG nova.compute.utils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 3050.198282] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Build of instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 was re-scheduled: A specified parameter was not correct: fileType [ 3050.198282] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 3050.198727] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 3050.198905] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 3050.199093] env[61663]: DEBUG nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 3050.199273] env[61663]: DEBUG nova.network.neutron [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3050.330352] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529422bc-fa37-b4f8-39a8-d0a6f50261c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009629} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3050.330656] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/ed66fcd7-0649-4263-aa2c-0262aaf5e46e/ts-2024-12-01-04-39-36 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3050.330919] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9fc29b5-f3f2-443c-ae48-15e1a6f6b7bc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.349550] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/ed66fcd7-0649-4263-aa2c-0262aaf5e46e/ts-2024-12-01-04-39-36 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3050.349717] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image ed66fcd7-0649-4263-aa2c-0262aaf5e46e is no longer used by this node. Pending deletion! [ 3050.349883] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/ed66fcd7-0649-4263-aa2c-0262aaf5e46e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3050.350124] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/f975138a-c178-43e8-8e69-c591b5852513" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3050.350245] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/f975138a-c178-43e8-8e69-c591b5852513" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3050.350566] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f975138a-c178-43e8-8e69-c591b5852513" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3050.350856] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2da1fae2-22c8-4f06-901b-18f98cb55dae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.355411] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3050.355411] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5212ada5-d969-add9-1923-e8225bb33cbf" [ 3050.355411] env[61663]: _type = "Task" [ 3050.355411] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3050.363218] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5212ada5-d969-add9-1923-e8225bb33cbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3050.568346] env[61663]: DEBUG nova.network.neutron [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3050.586127] env[61663]: INFO nova.compute.manager [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Took 0.39 seconds to deallocate network for instance. [ 3050.690316] env[61663]: INFO nova.scheduler.client.report [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Deleted allocations for instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 [ 3050.710344] env[61663]: DEBUG oslo_concurrency.lockutils [None req-038ede6a-32c9-4fb2-85b9-a252b1d810ca tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 677.595s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3050.710682] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 481.601s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3050.711058] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Acquiring lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3050.711361] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3050.711655] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3050.713934] env[61663]: INFO nova.compute.manager [None
req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Terminating instance [ 3050.715636] env[61663]: DEBUG nova.compute.manager [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 3050.715836] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3050.716126] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0daaf1b8-bc0c-4323-8190-681bbfe92ca2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.725164] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4092c2e8-c844-4a9b-9553-77f00ac52aae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.752409] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8fc36ed9-9315-4bdb-b4f3-248106a3c681 could not be found. [ 3050.752409] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3050.752409] env[61663]: INFO nova.compute.manager [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Took 0.04 seconds to destroy the instance on the hypervisor. [ 3050.752590] env[61663]: DEBUG oslo.service.loopingcall [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3050.752815] env[61663]: DEBUG nova.compute.manager [-] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 3050.752914] env[61663]: DEBUG nova.network.neutron [-] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3050.778207] env[61663]: DEBUG nova.network.neutron [-] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3050.786361] env[61663]: INFO nova.compute.manager [-] [instance: 8fc36ed9-9315-4bdb-b4f3-248106a3c681] Took 0.03 seconds to deallocate network for instance. [ 3050.866666] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5212ada5-d969-add9-1923-e8225bb33cbf, 'name': SearchDatastore_Task, 'duration_secs': 0.009052} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3050.866975] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/f975138a-c178-43e8-8e69-c591b5852513/ts-2024-12-01-04-39-36 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3050.867261] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2d87957-5413-4357-981a-be283c0e0272 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.872721] env[61663]: DEBUG oslo_concurrency.lockutils [None req-6b8dbaf1-4bdf-4e98-b8e7-7b0fd863c948 tempest-AttachVolumeNegativeTest-1350665205 tempest-AttachVolumeNegativeTest-1350665205-project-member] Lock "8fc36ed9-9315-4bdb-b4f3-248106a3c681" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.162s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3050.879642] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/f975138a-c178-43e8-8e69-c591b5852513/ts-2024-12-01-04-39-36 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3050.879775] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image f975138a-c178-43e8-8e69-c591b5852513 is no longer used by this node. Pending deletion!
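The image-cache records in this run follow one pattern per cached image: take a per-image lock on the devstack-image-cache_base path, run a SearchDatastore_Task to inspect the entry, then either delete it outright ("is no longer used. Deleting!") or drop a timestamp marker directory such as ts-2024-12-01-04-39-36 ("Pending deletion!") so a later pass can remove it once it has aged. The following is an illustrative sketch under those assumptions, not nova's actual imagecache module; lock, find_ts_marker, make_dir, and delete_file are assumed callables.

# Sketch of the per-image aging decision visible in the log above.
from datetime import datetime, timezone

def age_cached_image(image_id, used_images, lock, find_ts_marker,
                     make_dir, delete_file,
                     cache_root="[datastore2] devstack-image-cache_base"):
    path = "%s/%s" % (cache_root, image_id)
    with lock(path):  # the Acquiring/Acquired/Releasing lock lines above
        if image_id in used_images:
            return  # still referenced on this node; keep the cache entry
        if find_ts_marker(path):
            # Marked unused on an earlier pass: remove the entry now,
            # matching "Image ... is no longer used. Deleting!"
            print("Image %s is no longer used. Deleting!" % path)
            delete_file(path)
        else:
            # First pass that sees it unused: stamp it with a marker
            # directory, matching "ts-..." mkdir + "Pending deletion!"
            ts = datetime.now(timezone.utc).strftime("ts-%Y-%m-%d-%H-%M-%S")
            make_dir("%s/%s" % (path, ts))
            print("Image %s is no longer used by this node. "
                  "Pending deletion!" % image_id)

The timestamp directory makes the aging state persistent and cheap to read back: on the next periodic pass, a single datastore search appears to be enough to tell a freshly unused image from one that has already been pending long enough to delete, which matches the alternation of mkdir and DeleteDatastoreFile_Task calls in this section.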
[ 3050.879935] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/f975138a-c178-43e8-8e69-c591b5852513" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3050.880197] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/56cb81a9-06a8-4287-97ed-0034fdfa8079" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3050.880321] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/56cb81a9-06a8-4287-97ed-0034fdfa8079" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3050.880653] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/56cb81a9-06a8-4287-97ed-0034fdfa8079" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3050.880907] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-213aa24e-0298-467f-a670-d55ddb1e6e25 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.886013] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3050.886013] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bd1b98-a645-aec4-ad39-efa54071614e" [ 3050.886013] env[61663]: _type = "Task" [ 3050.886013] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3050.893852] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bd1b98-a645-aec4-ad39-efa54071614e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3051.397828] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52bd1b98-a645-aec4-ad39-efa54071614e, 'name': SearchDatastore_Task, 'duration_secs': 0.008772} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3051.400039] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/56cb81a9-06a8-4287-97ed-0034fdfa8079/ts-2024-12-01-04-39-37 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3051.400039] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-286b30cf-88b0-4870-b71c-27ee85d6e100 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3051.411233] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/56cb81a9-06a8-4287-97ed-0034fdfa8079/ts-2024-12-01-04-39-37 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3051.411367] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 56cb81a9-06a8-4287-97ed-0034fdfa8079 is no longer used by this node. Pending deletion! [ 3051.411527] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/56cb81a9-06a8-4287-97ed-0034fdfa8079" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3051.411741] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/018160a2-29e6-4f36-91e9-bc5d4da96fa9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3051.411860] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/018160a2-29e6-4f36-91e9-bc5d4da96fa9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3051.412195] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/018160a2-29e6-4f36-91e9-bc5d4da96fa9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3051.412420] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f3f3383-0058-42ac-bc57-8ca4f0811ad3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3051.416393] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3051.416393] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52daed10-a9ea-988c-bd0b-f807eb9b9a30" [ 3051.416393] env[61663]: _type = "Task" [ 3051.416393] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3051.423515] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52daed10-a9ea-988c-bd0b-f807eb9b9a30, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3051.928796] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52daed10-a9ea-988c-bd0b-f807eb9b9a30, 'name': SearchDatastore_Task, 'duration_secs': 0.009767} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3051.928796] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/018160a2-29e6-4f36-91e9-bc5d4da96fa9 is no longer used. Deleting! [ 3051.928796] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/018160a2-29e6-4f36-91e9-bc5d4da96fa9 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3051.928796] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e491512c-4134-42e1-8c9f-1cf8c67a25cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3051.935021] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3051.935021] env[61663]: value = "task-1690951" [ 3051.935021] env[61663]: _type = "Task" [ 3051.935021] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3051.940033] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690951, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3052.442787] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690951, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119688} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3052.446284] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3052.446284] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/018160a2-29e6-4f36-91e9-bc5d4da96fa9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3052.446284] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/816092e7-6d05-458d-a8a0-e70695a86b78" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3052.446284] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/816092e7-6d05-458d-a8a0-e70695a86b78" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3052.446284] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/816092e7-6d05-458d-a8a0-e70695a86b78" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3052.446284] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7d55ac5-eab6-4f0e-9726-7a3564794a72 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3052.451025] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3052.451025] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5200c96b-dc94-eaef-c5fe-f2a709fa1acd" [ 3052.451025] env[61663]: _type = "Task" [ 3052.451025] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3052.456761] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5200c96b-dc94-eaef-c5fe-f2a709fa1acd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3052.962066] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5200c96b-dc94-eaef-c5fe-f2a709fa1acd, 'name': SearchDatastore_Task, 'duration_secs': 0.009362} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3052.962066] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/816092e7-6d05-458d-a8a0-e70695a86b78/ts-2024-12-01-04-39-38 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3052.962066] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0c179f7-92fe-4f59-8fa6-e3689878b317 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3052.972706] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/816092e7-6d05-458d-a8a0-e70695a86b78/ts-2024-12-01-04-39-38 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3052.973071] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 816092e7-6d05-458d-a8a0-e70695a86b78 is no longer used by this node. Pending deletion! [ 3052.973356] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/816092e7-6d05-458d-a8a0-e70695a86b78" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3052.973707] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3052.974065] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3052.974484] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3052.974843] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e006821a-c7c6-4c71-8dce-f45a3e5c8a62 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3052.979121] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3052.979121] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5269735f-a5c3-00ba-f3e9-c32eebec2300" [ 3052.979121] env[61663]: _type = "Task" [ 3052.979121] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3052.986600] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5269735f-a5c3-00ba-f3e9-c32eebec2300, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3053.492868] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5269735f-a5c3-00ba-f3e9-c32eebec2300, 'name': SearchDatastore_Task, 'duration_secs': 0.007961} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3053.492868] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212/ts-2024-12-01-04-39-39 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3053.492868] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-33100692-bad0-4648-95bf-ba1fcb18c7b3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3053.503836] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212/ts-2024-12-01-04-39-39 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3053.504250] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image cfd7dda5-dd02-4e1a-a12e-b39abc6db212 is no longer used by this node. Pending deletion! [ 3053.504543] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3053.504922] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/82b30168-1c5e-4ddf-90ab-e57b66681950" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3053.505183] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/82b30168-1c5e-4ddf-90ab-e57b66681950" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3053.505612] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/82b30168-1c5e-4ddf-90ab-e57b66681950" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3053.505982] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ab7b22e-2f00-444d-a316-0c14e888458e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3053.510472] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3053.510472] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ef813a-962a-1068-2718-e568b289e9fe" [ 3053.510472] env[61663]: _type = "Task" [ 3053.510472] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3053.518657] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ef813a-962a-1068-2718-e568b289e9fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3054.022954] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ef813a-962a-1068-2718-e568b289e9fe, 'name': SearchDatastore_Task, 'duration_secs': 0.00741} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3054.022954] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/82b30168-1c5e-4ddf-90ab-e57b66681950/ts-2024-12-01-04-39-39 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3054.022954] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-efd73fae-f8c6-4ed4-9816-d2c9ed94f680 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3054.037161] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/82b30168-1c5e-4ddf-90ab-e57b66681950/ts-2024-12-01-04-39-39 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3054.037161] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 82b30168-1c5e-4ddf-90ab-e57b66681950 is no longer used by this node. Pending deletion! 
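The entries above repeat one pass of the same cache-aging cycle per image: take the per-image lock, run a SearchDatastore_Task, and, if the image is unused, create a ts-<timestamp> marker directory and log "Pending deletion!". Deletion itself only happens on a later pass, once the marker has aged past the retention window (as with 018160a2-29e6-4f36-91e9-bc5d4da96fa9 earlier, which went straight to "Deleting!"). Below is a minimal sketch of that mark-then-sweep logic; the `ds` client, `MAX_AGE` value, and function name are illustrative assumptions, not Nova's actual implementation.

```python
from datetime import datetime, timedelta

TS_PREFIX = "ts-"
TS_FORMAT = "%Y-%m-%d-%H-%M-%S"        # matches ts-2024-12-01-04-39-37
MAX_AGE = timedelta(days=1)            # assumed retention window

def age_cached_image(ds, cache_dir, image_id, now):
    """One aging pass over a single cached image.

    `ds` is a hypothetical datastore client exposing listdir()/mkdir()/
    delete(); it stands in for the FileManager.MakeDirectory and
    FileManager.DeleteDatastoreFile_Task calls seen in the log.
    """
    image_dir = "%s/%s" % (cache_dir, image_id)
    markers = [n for n in ds.listdir(image_dir) if n.startswith(TS_PREFIX)]
    if not markers:
        # First unused pass: drop a timestamp marker ("Pending deletion!").
        ds.mkdir("%s/%s%s" % (image_dir, TS_PREFIX, now.strftime(TS_FORMAT)))
        return False
    marked_at = datetime.strptime(markers[0][len(TS_PREFIX):], TS_FORMAT)
    if now - marked_at >= MAX_AGE:
        # Marker has aged out: remove the whole cache entry ("Deleting!").
        ds.delete(image_dir)
        return True
    return False
```

The point of marking first rather than deleting immediately is that an image which comes back into use before the window elapses can simply have its marker removed, so a briefly idle image is never lost to a single unlucky pass.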
[ 3054.037161] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/82b30168-1c5e-4ddf-90ab-e57b66681950" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3054.037161] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3054.037161] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3054.037161] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3054.037161] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00e1bf7d-b817-4c81-b9a6-29073c6e40f7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3054.040927] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3054.040927] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c41bc-fa47-1a2e-b4b5-07b8148b5405" [ 3054.040927] env[61663]: _type = "Task" [ 3054.040927] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3054.048402] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c41bc-fa47-1a2e-b4b5-07b8148b5405, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3054.554332] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528c41bc-fa47-1a2e-b4b5-07b8148b5405, 'name': SearchDatastore_Task, 'duration_secs': 0.00806} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3054.554332] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d/ts-2024-12-01-04-39-40 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3054.554332] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1c5aa04-0c2e-41fa-b328-50ac34e6b357 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3054.567019] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d/ts-2024-12-01-04-39-40 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3054.567019] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 0694b3db-45b7-4f54-933b-d14fc9f9453d is no longer used by this node. Pending deletion! [ 3054.567019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/0694b3db-45b7-4f54-933b-d14fc9f9453d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3054.567019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/68efc246-594d-40f7-8f7d-ae7ca000d332" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3054.567019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/68efc246-594d-40f7-8f7d-ae7ca000d332" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3054.567019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/68efc246-594d-40f7-8f7d-ae7ca000d332" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3054.567019] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3793ca74-ea53-4a22-bedf-6ea9f6df6e98 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3054.570346] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3054.570346] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52958129-bd6c-32bc-c671-59d12e0ff9a4" [ 3054.570346] env[61663]: _type = "Task" [ 3054.570346] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3054.578721] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52958129-bd6c-32bc-c671-59d12e0ff9a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3055.085379] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52958129-bd6c-32bc-c671-59d12e0ff9a4, 'name': SearchDatastore_Task, 'duration_secs': 0.008726} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3055.085379] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/68efc246-594d-40f7-8f7d-ae7ca000d332/ts-2024-12-01-04-39-40 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3055.088452] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6ee7ac7-9e83-4b97-92d0-7a00ce313478 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.102548] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/68efc246-594d-40f7-8f7d-ae7ca000d332/ts-2024-12-01-04-39-40 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3055.102548] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 68efc246-594d-40f7-8f7d-ae7ca000d332 is no longer used by this node. Pending deletion! [ 3055.102548] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/68efc246-594d-40f7-8f7d-ae7ca000d332" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3055.102548] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/f8bf6918-b157-482d-b29d-665c193e6039" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3055.102548] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/f8bf6918-b157-482d-b29d-665c193e6039" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3055.102998] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f8bf6918-b157-482d-b29d-665c193e6039" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3055.103428] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88398db2-6b0b-4fde-a1e2-9d72418e7b51 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.112040] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3055.112040] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d0f33e-c43b-fc15-1cc4-66abf829108b" [ 3055.112040] env[61663]: _type = "Task" [ 3055.112040] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3055.122095] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d0f33e-c43b-fc15-1cc4-66abf829108b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3055.628035] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d0f33e-c43b-fc15-1cc4-66abf829108b, 'name': SearchDatastore_Task, 'duration_secs': 0.009515} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3055.628500] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/f8bf6918-b157-482d-b29d-665c193e6039/ts-2024-12-01-04-39-41 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3055.628744] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cade50ec-8767-40a6-999e-db3c07db02a6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.641125] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/f8bf6918-b157-482d-b29d-665c193e6039/ts-2024-12-01-04-39-41 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3055.641305] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image f8bf6918-b157-482d-b29d-665c193e6039 is no longer used by this node. Pending deletion! 
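Every SearchDatastore_Task and DeleteDatastoreFile_Task above is driven by the same wait_for_task/_poll_task pair: submit the task, poll its progress, and return once it reports success, recording the 'duration_secs' figure that appears in the completion entries. A compact sketch of that polling loop follows; the `poll` callable is an assumption standing in for reading the task's TaskInfo over the vSphere API.

```python
import time

def wait_for_task(poll, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it finishes.

    `poll` is a hypothetical callable returning a
    (state, progress, duration_secs) tuple.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, duration_secs = poll()
        if state == "success":
            # corresponds to "... completed successfully" with a
            # 'duration_secs' value in the entries above
            return duration_secs
        if state == "error":
            raise RuntimeError("task failed")
        # corresponds to the "... progress is 0%." entries; back off
        # and re-read the task state
        time.sleep(interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)
```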
[ 3055.641436] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/f8bf6918-b157-482d-b29d-665c193e6039" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3055.641657] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3055.641801] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3055.642208] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3055.642461] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0739503-453c-4093-931d-567d7b74b3f3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.646931] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3055.646931] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f8b659-55ff-8c74-cbce-b56f12f3f753" [ 3055.646931] env[61663]: _type = "Task" [ 3055.646931] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3055.654253] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f8b659-55ff-8c74-cbce-b56f12f3f753, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3056.157385] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f8b659-55ff-8c74-cbce-b56f12f3f753, 'name': SearchDatastore_Task, 'duration_secs': 0.010057} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3056.157670] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5/ts-2024-12-01-04-39-42 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3056.157941] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90b28b60-2510-47bb-8958-61d447ac1f70 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.212173] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5/ts-2024-12-01-04-39-42 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3056.212343] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 345a93d0-41fb-4a1f-a9a5-e9f8369e34c5 is no longer used by this node. Pending deletion! [ 3056.212516] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/345a93d0-41fb-4a1f-a9a5-e9f8369e34c5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3056.212737] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/40daedc7-de34-4162-8f24-54bfe58a2ce0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3056.212861] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/40daedc7-de34-4162-8f24-54bfe58a2ce0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3056.213231] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/40daedc7-de34-4162-8f24-54bfe58a2ce0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3056.213561] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e50524c-b34e-443f-ab8c-d62acf615234 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.218145] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3056.218145] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52156d2c-e170-5fde-0c71-eff064d10d6d" [ 3056.218145] env[61663]: _type = "Task" [ 3056.218145] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3056.226241] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52156d2c-e170-5fde-0c71-eff064d10d6d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3056.728438] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52156d2c-e170-5fde-0c71-eff064d10d6d, 'name': SearchDatastore_Task, 'duration_secs': 0.043154} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3056.728834] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/40daedc7-de34-4162-8f24-54bfe58a2ce0/ts-2024-12-01-04-39-42 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3056.728980] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78fdfcf7-cc12-42a9-a5a9-31045c5666c1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.741297] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/40daedc7-de34-4162-8f24-54bfe58a2ce0/ts-2024-12-01-04-39-42 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3056.741451] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 40daedc7-de34-4162-8f24-54bfe58a2ce0 is no longer used by this node. Pending deletion! [ 3056.741591] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/40daedc7-de34-4162-8f24-54bfe58a2ce0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3056.741805] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6bc6beb5-7976-44bf-bfce-ab0274ce5856" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3056.741922] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6bc6beb5-7976-44bf-bfce-ab0274ce5856" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3056.742245] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6bc6beb5-7976-44bf-bfce-ab0274ce5856" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3056.742466] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38e81af1-d8f9-43b8-a770-fb012cbcc84e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.747240] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3056.747240] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527b24ed-3e64-22f1-b280-c350c16464ed" [ 3056.747240] env[61663]: _type = "Task" [ 3056.747240] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3056.754276] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527b24ed-3e64-22f1-b280-c350c16464ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3057.257413] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527b24ed-3e64-22f1-b280-c350c16464ed, 'name': SearchDatastore_Task, 'duration_secs': 0.008209} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3057.257691] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/6bc6beb5-7976-44bf-bfce-ab0274ce5856/ts-2024-12-01-04-39-43 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3057.257952] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e87928d2-c9ec-40eb-8c8c-b451cc0253e7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3057.269750] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/6bc6beb5-7976-44bf-bfce-ab0274ce5856/ts-2024-12-01-04-39-43 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3057.269911] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 6bc6beb5-7976-44bf-bfce-ab0274ce5856 is no longer used by this node. Pending deletion! 
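Each image is inspected under a lock named after its datastore path, so overlapping periodic tasks cannot race on the same cache entry; the "Acquired external semaphore" lines correspond to the file-based (external) half of an oslo.concurrency lock. A sketch using lockutils directly is below; the lock_path here is an assumption, where a real deployment takes it from oslo.config.

```python
from oslo_concurrency import lockutils

def with_image_lock(image_id, fn):
    """Serialize work on one cache entry, mirroring the
    Acquiring/Acquired/Releasing sequences in the log."""
    # Lock name mirrors the datastore path used in the entries above.
    name = "[datastore2] devstack-image-cache_base/%s" % image_id
    # external=True adds a file lock so separate processes on the same
    # host are also serialized, not just threads in this one.
    with lockutils.lock(name, external=True, lock_path="/tmp/nova-locks"):
        return fn()
```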
[ 3057.270089] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6bc6beb5-7976-44bf-bfce-ab0274ce5856" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3057.270309] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/1eea85c2-d141-4f42-9692-ef3424f757fc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3057.270428] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/1eea85c2-d141-4f42-9692-ef3424f757fc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3057.270749] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1eea85c2-d141-4f42-9692-ef3424f757fc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3057.270998] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90471665-6b60-45e9-8721-8f9704343037 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3057.275051] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3057.275051] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d1a9de-8aa5-528c-b371-d5e4db0ecd41" [ 3057.275051] env[61663]: _type = "Task" [ 3057.275051] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3057.282221] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d1a9de-8aa5-528c-b371-d5e4db0ecd41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3057.785869] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d1a9de-8aa5-528c-b371-d5e4db0ecd41, 'name': SearchDatastore_Task, 'duration_secs': 0.009067} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3057.786418] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/1eea85c2-d141-4f42-9692-ef3424f757fc is no longer used. Deleting! 
[ 3057.786621] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/1eea85c2-d141-4f42-9692-ef3424f757fc {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3057.786971] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-589faf06-3557-4361-89e9-47604905d40f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3057.795654] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3057.795654] env[61663]: value = "task-1690952" [ 3057.795654] env[61663]: _type = "Task" [ 3057.795654] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3057.802995] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690952, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3058.305636] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690952, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11967} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3058.306017] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3058.306304] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/1eea85c2-d141-4f42-9692-ef3424f757fc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3058.306680] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/cab132b1-4f76-4f44-abbe-dd00c3ea9028" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3058.306911] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/cab132b1-4f76-4f44-abbe-dd00c3ea9028" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3058.307344] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/cab132b1-4f76-4f44-abbe-dd00c3ea9028" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3058.307676] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c0543f5-daaa-4e97-ae83-15d0e590ad7f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3058.312103] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3058.312103] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52928f2a-75e0-1230-fb8d-f23934e8902b" [ 3058.312103] env[61663]: _type = "Task" [ 3058.312103] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3058.320377] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52928f2a-75e0-1230-fb8d-f23934e8902b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3058.823401] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52928f2a-75e0-1230-fb8d-f23934e8902b, 'name': SearchDatastore_Task, 'duration_secs': 0.008365} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3058.823750] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/cab132b1-4f76-4f44-abbe-dd00c3ea9028/ts-2024-12-01-04-39-44 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3058.823899] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e79d1674-725d-4b2a-8ad2-d32bdcfac44c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3058.835448] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/cab132b1-4f76-4f44-abbe-dd00c3ea9028/ts-2024-12-01-04-39-44 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3058.835624] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image cab132b1-4f76-4f44-abbe-dd00c3ea9028 is no longer used by this node. Pending deletion!
[ 3058.835814] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/cab132b1-4f76-4f44-abbe-dd00c3ea9028" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3058.836070] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3058.836244] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3058.836567] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3058.836794] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07da00c6-82d6-44f4-9654-3cdf65cd94ec {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3058.840759] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3058.840759] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522134c7-e890-cc92-627c-31dc79ee2df6" [ 3058.840759] env[61663]: _type = "Task" [ 3058.840759] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3058.847729] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522134c7-e890-cc92-627c-31dc79ee2df6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3059.351335] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522134c7-e890-cc92-627c-31dc79ee2df6, 'name': SearchDatastore_Task, 'duration_secs': 0.007887} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3059.351621] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509/ts-2024-12-01-04-39-45 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3059.351985] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58975cb3-bbce-4683-94e7-fafcf6b9c81f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3059.364329] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509/ts-2024-12-01-04-39-45 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3059.364505] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 054d54ae-f4ae-4c8b-899a-95996301b509 is no longer used by this node. Pending deletion! [ 3059.364628] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3059.364852] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3059.364986] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3059.365301] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3059.365569] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12821a77-0613-4ac4-aea8-3d124204c948 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3059.370058] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3059.370058] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520abbde-aaab-a38e-e99d-0bbb70f9befe" [ 3059.370058] env[61663]: _type = "Task" [ 3059.370058] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3059.377533] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520abbde-aaab-a38e-e99d-0bbb70f9befe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3059.882239] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520abbde-aaab-a38e-e99d-0bbb70f9befe, 'name': SearchDatastore_Task, 'duration_secs': 0.009187} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3059.882239] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293 is no longer used. Deleting! [ 3059.882239] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3059.882239] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35f3f0ce-3d24-46d4-9fca-83a04fd36b3d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3059.887650] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3059.887650] env[61663]: value = "task-1690953" [ 3059.887650] env[61663]: _type = "Task" [ 3059.887650] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3059.894467] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690953, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3060.397581] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690953, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109665} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3060.397828] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3060.397980] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3060.398208] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4fdbb1f1-9b21-47bc-8afe-37b7da0ada75" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3060.398330] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4fdbb1f1-9b21-47bc-8afe-37b7da0ada75" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3060.398684] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4fdbb1f1-9b21-47bc-8afe-37b7da0ada75" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3060.398962] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd655af8-ef62-4bfc-b44c-71c331b370f5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3060.403073] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3060.403073] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ce06d9-a1cc-1264-c156-3a59016afa18" [ 3060.403073] env[61663]: _type = "Task" [ 3060.403073] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3060.409951] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ce06d9-a1cc-1264-c156-3a59016afa18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3060.914026] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ce06d9-a1cc-1264-c156-3a59016afa18, 'name': SearchDatastore_Task, 'duration_secs': 0.008961} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3060.914362] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4fdbb1f1-9b21-47bc-8afe-37b7da0ada75/ts-2024-12-01-04-39-46 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3060.914611] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f903e62e-1466-4528-a28e-60c144cb5fc3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3060.926751] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4fdbb1f1-9b21-47bc-8afe-37b7da0ada75/ts-2024-12-01-04-39-46 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3060.926888] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4fdbb1f1-9b21-47bc-8afe-37b7da0ada75 is no longer used by this node. Pending deletion! [ 3060.927019] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4fdbb1f1-9b21-47bc-8afe-37b7da0ada75" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3060.927313] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3060.927394] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3060.927692] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3060.927936] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca589dde-4b2a-433a-a071-d3d780e5ffc6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3060.932046] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3060.932046] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e1a91e-1d52-616a-f39e-7f4be261108c" [ 3060.932046] env[61663]: _type = "Task" [ 3060.932046] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3060.939401] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e1a91e-1d52-616a-f39e-7f4be261108c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3061.442462] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e1a91e-1d52-616a-f39e-7f4be261108c, 'name': SearchDatastore_Task, 'duration_secs': 0.007943} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3061.442725] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b/ts-2024-12-01-04-39-47 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3061.442983] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8548cf7c-24d7-4143-892d-7f9bdd833f66 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3061.455176] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b/ts-2024-12-01-04-39-47 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3061.455316] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e9d7a45b-6ee0-421f-82fb-db2ef8922c9b is no longer used by this node. Pending deletion! [ 3061.455483] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3061.455697] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/fbfd932f-7edb-4c7f-b894-dbd8d1eea99c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3061.455828] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/fbfd932f-7edb-4c7f-b894-dbd8d1eea99c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3061.456160] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/fbfd932f-7edb-4c7f-b894-dbd8d1eea99c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3061.456385] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cccf515-3a80-497c-a3c3-9bc2a0779a30 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3061.460372] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3061.460372] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52063da3-87bf-21db-eeb7-8e393cef220e" [ 3061.460372] env[61663]: _type = "Task" [ 3061.460372] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3061.467397] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52063da3-87bf-21db-eeb7-8e393cef220e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3061.971016] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52063da3-87bf-21db-eeb7-8e393cef220e, 'name': SearchDatastore_Task, 'duration_secs': 0.008317} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3061.971379] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/fbfd932f-7edb-4c7f-b894-dbd8d1eea99c/ts-2024-12-01-04-39-47 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3061.971635] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c35b5ee-b6a5-4eb4-b4d3-2d0372f67bf2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3061.984056] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/fbfd932f-7edb-4c7f-b894-dbd8d1eea99c/ts-2024-12-01-04-39-47 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3061.984215] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image fbfd932f-7edb-4c7f-b894-dbd8d1eea99c is no longer used by this node. Pending deletion! 
[ 3061.984382] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/fbfd932f-7edb-4c7f-b894-dbd8d1eea99c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3061.984599] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/c9235b52-1834-4387-a334-a8558e809c09" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3061.984717] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/c9235b52-1834-4387-a334-a8558e809c09" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3061.985044] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/c9235b52-1834-4387-a334-a8558e809c09" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3061.985302] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef29b640-a8b9-4586-b5f5-095eef224673 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3061.990673] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3061.990673] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527e8cb0-3874-0833-88d9-60f813c7d837" [ 3061.990673] env[61663]: _type = "Task" [ 3061.990673] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3061.998118] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527e8cb0-3874-0833-88d9-60f813c7d837, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3062.501456] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527e8cb0-3874-0833-88d9-60f813c7d837, 'name': SearchDatastore_Task, 'duration_secs': 0.008179} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3062.501703] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/c9235b52-1834-4387-a334-a8558e809c09/ts-2024-12-01-04-39-48 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3062.501925] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13cc0efa-e650-42e3-b6dc-3d7ec299ddab {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3062.513755] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/c9235b52-1834-4387-a334-a8558e809c09/ts-2024-12-01-04-39-48 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3062.513905] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image c9235b52-1834-4387-a334-a8558e809c09 is no longer used by this node. Pending deletion! [ 3062.514069] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/c9235b52-1834-4387-a334-a8558e809c09" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3062.514282] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7bca5c6c-972d-456d-8c3c-efdb5e7a0c61" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3062.514401] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7bca5c6c-972d-456d-8c3c-efdb5e7a0c61" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3062.514716] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7bca5c6c-972d-456d-8c3c-efdb5e7a0c61" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3062.514942] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57971878-b66f-45d2-8a2e-dada6faf2158 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3062.519052] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3062.519052] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e34959-8ebc-285a-9720-f63c9faba5bb" [ 3062.519052] env[61663]: _type = "Task" [ 3062.519052] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3062.526376] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e34959-8ebc-285a-9720-f63c9faba5bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3063.029600] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e34959-8ebc-285a-9720-f63c9faba5bb, 'name': SearchDatastore_Task, 'duration_secs': 0.008561} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3063.029942] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/7bca5c6c-972d-456d-8c3c-efdb5e7a0c61 is no longer used. Deleting! [ 3063.030094] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/7bca5c6c-972d-456d-8c3c-efdb5e7a0c61 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3063.030373] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fe7c45f-af41-4478-bd6a-451e5ff2dac1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3063.036185] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3063.036185] env[61663]: value = "task-1690954" [ 3063.036185] env[61663]: _type = "Task" [ 3063.036185] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3063.043413] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3063.545526] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115231} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3063.545740] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3063.545912] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7bca5c6c-972d-456d-8c3c-efdb5e7a0c61" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3063.546155] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e0b4fea6-3b85-433a-9943-cd349ea23b4f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3063.546280] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e0b4fea6-3b85-433a-9943-cd349ea23b4f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3063.546609] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e0b4fea6-3b85-433a-9943-cd349ea23b4f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3063.546877] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9717f5ff-6a16-4e3b-983e-54c0c29767a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3063.551079] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3063.551079] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cc9296-7b07-ab12-e094-705e7628c263" [ 3063.551079] env[61663]: _type = "Task" [ 3063.551079] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3063.558388] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cc9296-7b07-ab12-e094-705e7628c263, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3064.061354] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cc9296-7b07-ab12-e094-705e7628c263, 'name': SearchDatastore_Task, 'duration_secs': 0.008063} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3064.061728] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e0b4fea6-3b85-433a-9943-cd349ea23b4f/ts-2024-12-01-04-39-49 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3064.061860] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d9ce423-78da-4f2d-ac87-a241a85efee9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3064.072835] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e0b4fea6-3b85-433a-9943-cd349ea23b4f/ts-2024-12-01-04-39-49 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3064.072974] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e0b4fea6-3b85-433a-9943-cd349ea23b4f is no longer used by this node. Pending deletion! [ 3064.073151] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e0b4fea6-3b85-433a-9943-cd349ea23b4f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3064.073362] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3064.073481] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3064.073784] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3064.074011] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b07e3a14-e1d1-4d27-8ebb-b21a89495d2f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3064.077746] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3064.077746] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526b051c-1c5e-8900-34d5-fb59ede1e587" [ 3064.077746] env[61663]: _type = "Task" [ 3064.077746] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3064.084673] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526b051c-1c5e-8900-34d5-fb59ede1e587, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3064.588409] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526b051c-1c5e-8900-34d5-fb59ede1e587, 'name': SearchDatastore_Task, 'duration_secs': 0.007885} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3064.588676] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d/ts-2024-12-01-04-39-50 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3064.588961] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c993f163-3725-41bb-b919-bd5dae3507f7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3064.600033] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d/ts-2024-12-01-04-39-50 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3064.600150] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 443a8916-4f98-4cb9-9e27-49dd792e901d is no longer used by this node. Pending deletion! [ 3064.600305] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/443a8916-4f98-4cb9-9e27-49dd792e901d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3064.600518] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/b81753c8-dbd4-4edc-a86b-2b541e309837" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3064.600637] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/b81753c8-dbd4-4edc-a86b-2b541e309837" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3064.600970] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b81753c8-dbd4-4edc-a86b-2b541e309837" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3064.601239] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba37066b-f0a5-47a6-8d52-6254ded5106d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3064.605100] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3064.605100] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522d042a-9f8b-f3e8-1572-f7f5c61b6871" [ 3064.605100] env[61663]: _type = "Task" [ 3064.605100] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3064.612086] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522d042a-9f8b-f3e8-1572-f7f5c61b6871, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3065.115609] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522d042a-9f8b-f3e8-1572-f7f5c61b6871, 'name': SearchDatastore_Task, 'duration_secs': 0.008136} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3065.115924] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/b81753c8-dbd4-4edc-a86b-2b541e309837/ts-2024-12-01-04-39-50 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3065.116158] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dac18ed4-8c06-4327-b450-a074f0c9e93e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3065.127369] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/b81753c8-dbd4-4edc-a86b-2b541e309837/ts-2024-12-01-04-39-50 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3065.127511] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image b81753c8-dbd4-4edc-a86b-2b541e309837 is no longer used by this node. Pending deletion! 
[ 3065.127674] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/b81753c8-dbd4-4edc-a86b-2b541e309837" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3065.127888] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/a46827ed-fb3b-46ca-9caa-0ddc78d71fb1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3065.128014] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/a46827ed-fb3b-46ca-9caa-0ddc78d71fb1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3065.128339] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a46827ed-fb3b-46ca-9caa-0ddc78d71fb1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3065.128573] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62f32d82-1c9e-4bfb-95e6-6fd7bddffc1e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3065.132723] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3065.132723] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522517e8-1edb-3525-14c4-35471d953a10" [ 3065.132723] env[61663]: _type = "Task" [ 3065.132723] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3065.139888] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522517e8-1edb-3525-14c4-35471d953a10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3065.643013] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522517e8-1edb-3525-14c4-35471d953a10, 'name': SearchDatastore_Task, 'duration_secs': 0.007863} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3065.643291] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/a46827ed-fb3b-46ca-9caa-0ddc78d71fb1/ts-2024-12-01-04-39-51 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3065.643540] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8561e9ca-5f57-4233-8c42-bc0f62c7707c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3065.655054] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/a46827ed-fb3b-46ca-9caa-0ddc78d71fb1/ts-2024-12-01-04-39-51 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3065.655200] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image a46827ed-fb3b-46ca-9caa-0ddc78d71fb1 is no longer used by this node. Pending deletion! [ 3065.655360] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/a46827ed-fb3b-46ca-9caa-0ddc78d71fb1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3065.655568] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/295e811f-592c-41d7-8595-6bd159c4ba89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3065.655685] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/295e811f-592c-41d7-8595-6bd159c4ba89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3065.656014] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/295e811f-592c-41d7-8595-6bd159c4ba89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3065.656235] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9f60ede-f4b1-43a3-8e7c-22636d81a109 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3065.661045] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3065.661045] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f1b1cc-72c2-33bf-1b6a-db51716908b9" [ 3065.661045] env[61663]: _type = "Task" [ 3065.661045] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3065.667772] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f1b1cc-72c2-33bf-1b6a-db51716908b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3066.171873] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f1b1cc-72c2-33bf-1b6a-db51716908b9, 'name': SearchDatastore_Task, 'duration_secs': 0.007485} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3066.172196] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/295e811f-592c-41d7-8595-6bd159c4ba89/ts-2024-12-01-04-39-52 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3066.172421] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90f1b875-8bde-4702-8b36-8cb446c9e4af {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3066.183967] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/295e811f-592c-41d7-8595-6bd159c4ba89/ts-2024-12-01-04-39-52 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3066.184127] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 295e811f-592c-41d7-8595-6bd159c4ba89 is no longer used by this node. Pending deletion! [ 3066.184288] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/295e811f-592c-41d7-8595-6bd159c4ba89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3066.184500] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/42e5d0c4-6b1c-41c3-9f8c-fb976ec060ea" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3066.184620] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/42e5d0c4-6b1c-41c3-9f8c-fb976ec060ea" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3066.184928] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/42e5d0c4-6b1c-41c3-9f8c-fb976ec060ea" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3066.185167] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b054a315-09db-460a-8443-774cebe11bb3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3066.189375] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3066.189375] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520c2a26-9f35-44f1-d60d-9ec820dd2dfd" [ 3066.189375] env[61663]: _type = "Task" [ 3066.189375] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3066.196438] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520c2a26-9f35-44f1-d60d-9ec820dd2dfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3066.700149] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520c2a26-9f35-44f1-d60d-9ec820dd2dfd, 'name': SearchDatastore_Task, 'duration_secs': 0.007842} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3066.700430] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/42e5d0c4-6b1c-41c3-9f8c-fb976ec060ea/ts-2024-12-01-04-39-52 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3066.700682] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b61b996-c19e-4d91-bd5f-e31d08a5a085 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3066.713122] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/42e5d0c4-6b1c-41c3-9f8c-fb976ec060ea/ts-2024-12-01-04-39-52 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3066.713271] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 42e5d0c4-6b1c-41c3-9f8c-fb976ec060ea is no longer used by this node. Pending deletion! 
[ 3066.713438] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/42e5d0c4-6b1c-41c3-9f8c-fb976ec060ea" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3066.713654] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2f533c88-a48e-40ac-bfbf-35e1f6cc3806" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3066.713775] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2f533c88-a48e-40ac-bfbf-35e1f6cc3806" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3066.714123] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2f533c88-a48e-40ac-bfbf-35e1f6cc3806" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3066.714351] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-116c9fd6-ebc0-4f09-ad21-41ae5741d950 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3066.718418] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3066.718418] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f50169-91c0-f632-a2d5-98eacbf561da" [ 3066.718418] env[61663]: _type = "Task" [ 3066.718418] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3066.725502] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f50169-91c0-f632-a2d5-98eacbf561da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3067.228743] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f50169-91c0-f632-a2d5-98eacbf561da, 'name': SearchDatastore_Task, 'duration_secs': 0.007852} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3067.229141] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/2f533c88-a48e-40ac-bfbf-35e1f6cc3806/ts-2024-12-01-04-39-53 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3067.229302] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f04879a3-2c27-4a52-9809-48c0f3c41370 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3067.240897] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/2f533c88-a48e-40ac-bfbf-35e1f6cc3806/ts-2024-12-01-04-39-53 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3067.241045] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 2f533c88-a48e-40ac-bfbf-35e1f6cc3806 is no longer used by this node. Pending deletion! [ 3067.241215] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2f533c88-a48e-40ac-bfbf-35e1f6cc3806" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3067.241433] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3067.241551] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3067.241911] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3067.242153] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e19953a-95d1-4510-81a7-eed7da996b7e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3067.245998] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3067.245998] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e72628-c348-708c-ac6e-315295f19e9e" [ 3067.245998] env[61663]: _type = "Task" [ 3067.245998] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3067.252973] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e72628-c348-708c-ac6e-315295f19e9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3067.757230] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e72628-c348-708c-ac6e-315295f19e9e, 'name': SearchDatastore_Task, 'duration_secs': 0.008587} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3067.757511] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c is no longer used. Deleting! [ 3067.757673] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3067.757915] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afb6059a-a8fb-41f5-a681-b6726990a4a5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3067.764097] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3067.764097] env[61663]: value = "task-1690955" [ 3067.764097] env[61663]: _type = "Task" [ 3067.764097] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3067.772159] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690955, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3068.273723] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690955, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108458} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3068.274087] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3068.274133] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3068.274334] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/0261edc7-7f5e-4f74-94f3-2ce5a95787d3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3068.274456] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/0261edc7-7f5e-4f74-94f3-2ce5a95787d3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3068.274764] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0261edc7-7f5e-4f74-94f3-2ce5a95787d3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3068.275030] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8b517aa-3f15-4742-b693-139cfd55b9ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.279152] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3068.279152] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525eab3e-0e00-3fbd-7061-a2e0700f47af" [ 3068.279152] env[61663]: _type = "Task" [ 3068.279152] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3068.286234] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525eab3e-0e00-3fbd-7061-a2e0700f47af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3068.790268] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525eab3e-0e00-3fbd-7061-a2e0700f47af, 'name': SearchDatastore_Task, 'duration_secs': 0.008259} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3068.790570] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/0261edc7-7f5e-4f74-94f3-2ce5a95787d3/ts-2024-12-01-04-39-54 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3068.790865] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ae66821-379b-4c96-8c01-5b0deb31d17c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.803367] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/0261edc7-7f5e-4f74-94f3-2ce5a95787d3/ts-2024-12-01-04-39-54 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3068.803518] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 0261edc7-7f5e-4f74-94f3-2ce5a95787d3 is no longer used by this node. Pending deletion! [ 3068.803685] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/0261edc7-7f5e-4f74-94f3-2ce5a95787d3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3068.803904] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/087036a2-0c90-41ce-b02d-a75ed5465d39" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3068.804038] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/087036a2-0c90-41ce-b02d-a75ed5465d39" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3068.804392] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/087036a2-0c90-41ce-b02d-a75ed5465d39" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3068.804653] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-669bc912-1a08-4bb7-b92f-5df6e3ceaa29 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.809278] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3068.809278] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f3587-b258-a12b-1323-e23c8cdf6ae3" [ 3068.809278] env[61663]: _type = "Task" [ 3068.809278] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3068.817290] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f3587-b258-a12b-1323-e23c8cdf6ae3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3069.321524] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529f3587-b258-a12b-1323-e23c8cdf6ae3, 'name': SearchDatastore_Task, 'duration_secs': 0.008865} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3069.321855] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/087036a2-0c90-41ce-b02d-a75ed5465d39/ts-2024-12-01-04-39-55 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3069.322171] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf02b041-470f-403f-a1ba-e09ee5987799 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.334766] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/087036a2-0c90-41ce-b02d-a75ed5465d39/ts-2024-12-01-04-39-55 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3069.334958] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 087036a2-0c90-41ce-b02d-a75ed5465d39 is no longer used by this node. Pending deletion! [ 3069.335179] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/087036a2-0c90-41ce-b02d-a75ed5465d39" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3069.335442] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3069.335603] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3069.335965] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3069.336278] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e920f70-b8a5-4c74-a634-9438abbd19d6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.340953] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3069.340953] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521bef7b-2611-44cb-47b5-322a89162e1f" [ 3069.340953] env[61663]: _type = "Task" [ 3069.340953] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3069.348858] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521bef7b-2611-44cb-47b5-322a89162e1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3069.852428] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]521bef7b-2611-44cb-47b5-322a89162e1f, 'name': SearchDatastore_Task, 'duration_secs': 0.008499} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3069.852710] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95/ts-2024-12-01-04-39-55 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3069.852986] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b33f295-75b9-44c6-a2f8-22bce6b3fb46 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.867320] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95/ts-2024-12-01-04-39-55 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3069.867489] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4e5561a3-16c4-4011-9364-3201c12a7f95 is no longer used by this node. Pending deletion! 
[ 3069.867656] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3069.867879] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/a8fe9483-e037-4ae6-b1cc-2154f00e1cc1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3069.868008] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/a8fe9483-e037-4ae6-b1cc-2154f00e1cc1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3069.868328] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/a8fe9483-e037-4ae6-b1cc-2154f00e1cc1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3069.868598] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59b65c47-8c87-4b34-9acd-786c3ab2073f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.874107] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3069.874107] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5207b3bd-8a7c-b71a-cd0a-cbd55f24843e" [ 3069.874107] env[61663]: _type = "Task" [ 3069.874107] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3069.882483] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5207b3bd-8a7c-b71a-cd0a-cbd55f24843e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3070.384551] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5207b3bd-8a7c-b71a-cd0a-cbd55f24843e, 'name': SearchDatastore_Task, 'duration_secs': 0.008994} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3070.384871] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/a8fe9483-e037-4ae6-b1cc-2154f00e1cc1/ts-2024-12-01-04-39-56 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3070.385072] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7836c0b-ada2-4c27-b06b-013f4baca2fd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.396781] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/a8fe9483-e037-4ae6-b1cc-2154f00e1cc1/ts-2024-12-01-04-39-56 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3070.397061] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image a8fe9483-e037-4ae6-b1cc-2154f00e1cc1 is no longer used by this node. Pending deletion! [ 3070.397299] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/a8fe9483-e037-4ae6-b1cc-2154f00e1cc1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3070.397526] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4584636c-b7be-421e-bd7d-df72be57fd98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3070.397650] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4584636c-b7be-421e-bd7d-df72be57fd98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3070.398041] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4584636c-b7be-421e-bd7d-df72be57fd98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3070.398344] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86e16dd9-0680-4c61-a712-4df1da19b05b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.402631] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3070.402631] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524f220d-8ec0-4781-ee65-4b8acd27fcc9" [ 3070.402631] env[61663]: _type = "Task" [ 3070.402631] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3070.410902] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524f220d-8ec0-4781-ee65-4b8acd27fcc9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3070.446918] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "a05e73dc-3f4f-4743-8eba-057e8ac4b28d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3070.447153] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "a05e73dc-3f4f-4743-8eba-057e8ac4b28d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3070.457671] env[61663]: DEBUG nova.compute.manager [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Starting instance... {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 3070.511770] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3070.512046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3070.513529] env[61663]: INFO nova.compute.claims [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 3070.674190] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1b1838-49d0-4b42-93fe-add8fe67e3b6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.681681] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e675b8-ba5c-4c91-bf88-4553d2ea2103 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.712653] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101cef34-5188-4511-b109-522e05fcd53d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.719492] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140ae70f-c246-4e79-9e21-9719a0cf5fa6 
{{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.732181] env[61663]: DEBUG nova.compute.provider_tree [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3070.740520] env[61663]: DEBUG nova.scheduler.client.report [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3070.754044] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.242s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3070.754491] env[61663]: DEBUG nova.compute.manager [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Start building networks asynchronously for instance. {{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 3070.785357] env[61663]: DEBUG nova.compute.utils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Using /dev/sd instead of None {{(pid=61663) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 3070.786801] env[61663]: DEBUG nova.compute.manager [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Allocating IP information in the background. {{(pid=61663) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 3070.787105] env[61663]: DEBUG nova.network.neutron [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] allocate_for_instance() {{(pid=61663) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 3070.797306] env[61663]: DEBUG nova.compute.manager [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Start building block device mappings for instance. 
{{(pid=61663) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 3070.848310] env[61663]: DEBUG nova.policy [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9ecb588edff64911bf5120de68b010eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8b90f6021c544484902ae30054503895', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61663) authorize /opt/stack/nova/nova/policy.py:203}} [ 3070.861252] env[61663]: DEBUG nova.compute.manager [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Start spawning the instance on the hypervisor. {{(pid=61663) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 3070.890855] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:57:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:56:57Z,direct_url=,disk_format='vmdk',id=362c8152-fcd0-4f43-acbf-09a2dc376cb2,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='cafa379ce6b143b88e4741a849af1088',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:56:57Z,virtual_size=,visibility=), allow threads: False {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 3070.891126] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Flavor limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 3070.891318] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Image limits 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 3070.891509] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Flavor pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 3070.891662] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Image pref 0:0:0 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 3070.891812] env[61663]: DEBUG nova.virt.hardware [None 
req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61663) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 3070.892125] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 3070.892347] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 3070.892565] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Got 1 possible topologies {{(pid=61663) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 3070.892772] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 3070.892993] env[61663]: DEBUG nova.virt.hardware [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61663) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 3070.893888] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d26677-1239-46e6-a575-9175d95df352 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.902344] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9825fa80-fa9a-44de-ac57-fb810a4c12dd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.922514] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]524f220d-8ec0-4781-ee65-4b8acd27fcc9, 'name': SearchDatastore_Task, 'duration_secs': 0.008129} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3070.922831] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4584636c-b7be-421e-bd7d-df72be57fd98/ts-2024-12-01-04-39-56 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3070.923467] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c09eb16-8ed0-46e6-be39-002591278349 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.942552] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4584636c-b7be-421e-bd7d-df72be57fd98/ts-2024-12-01-04-39-56 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3070.942838] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4584636c-b7be-421e-bd7d-df72be57fd98 is no longer used by this node. Pending deletion! [ 3070.942908] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4584636c-b7be-421e-bd7d-df72be57fd98" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3070.943108] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/59ffcf85-b25c-4ae5-9589-0b294de7453b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3070.943237] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/59ffcf85-b25c-4ae5-9589-0b294de7453b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3070.943541] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/59ffcf85-b25c-4ae5-9589-0b294de7453b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3070.943811] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d921ed5-24b1-4bad-91d3-2b2bb26c3612 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.948145] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3070.948145] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52522233-39be-0f0e-7b73-b171fcc62adc" [ 3070.948145] env[61663]: _type = "Task" [ 3070.948145] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3070.955791] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52522233-39be-0f0e-7b73-b171fcc62adc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3071.309778] env[61663]: DEBUG nova.network.neutron [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Successfully created port: 3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4 {{(pid=61663) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 3071.460483] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52522233-39be-0f0e-7b73-b171fcc62adc, 'name': SearchDatastore_Task, 'duration_secs': 0.007975} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3071.460878] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/59ffcf85-b25c-4ae5-9589-0b294de7453b/ts-2024-12-01-04-39-57 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3071.461268] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffe98462-0459-4dea-9452-68a172f47256 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.473315] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/59ffcf85-b25c-4ae5-9589-0b294de7453b/ts-2024-12-01-04-39-57 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3071.473450] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 59ffcf85-b25c-4ae5-9589-0b294de7453b is no longer used by this node. Pending deletion! 
[ 3071.473507] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/59ffcf85-b25c-4ae5-9589-0b294de7453b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3071.473730] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/28041873-24f7-4d77-bed8-a078ea1c7b7f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3071.473849] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/28041873-24f7-4d77-bed8-a078ea1c7b7f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3071.474195] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/28041873-24f7-4d77-bed8-a078ea1c7b7f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3071.474455] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b00272cb-6e45-4d86-b4dc-792fc61c48d2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.479107] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3071.479107] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52491e4f-1a5b-e4f6-b32c-4afc2af55468" [ 3071.479107] env[61663]: _type = "Task" [ 3071.479107] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3071.488670] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52491e4f-1a5b-e4f6-b32c-4afc2af55468, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3071.990705] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52491e4f-1a5b-e4f6-b32c-4afc2af55468, 'name': SearchDatastore_Task, 'duration_secs': 0.008878} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3071.990705] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/28041873-24f7-4d77-bed8-a078ea1c7b7f is no longer used. Deleting! 
[ 3071.990705] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/28041873-24f7-4d77-bed8-a078ea1c7b7f {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3071.990705] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7846f197-9ba2-451c-b7fe-ca80625bd0b0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.997043] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3071.997043] env[61663]: value = "task-1690956" [ 3071.997043] env[61663]: _type = "Task" [ 3071.997043] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3072.005153] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690956, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3072.132048] env[61663]: DEBUG nova.compute.manager [req-73b8cdf0-e4f6-4794-9992-97aa8911179d req-5f5ccefd-b822-4125-b3e4-57bb35b8bea5 service nova] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Received event network-vif-plugged-3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 3072.132048] env[61663]: DEBUG oslo_concurrency.lockutils [req-73b8cdf0-e4f6-4794-9992-97aa8911179d req-5f5ccefd-b822-4125-b3e4-57bb35b8bea5 service nova] Acquiring lock "a05e73dc-3f4f-4743-8eba-057e8ac4b28d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3072.132048] env[61663]: DEBUG oslo_concurrency.lockutils [req-73b8cdf0-e4f6-4794-9992-97aa8911179d req-5f5ccefd-b822-4125-b3e4-57bb35b8bea5 service nova] Lock "a05e73dc-3f4f-4743-8eba-057e8ac4b28d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3072.132048] env[61663]: DEBUG oslo_concurrency.lockutils [req-73b8cdf0-e4f6-4794-9992-97aa8911179d req-5f5ccefd-b822-4125-b3e4-57bb35b8bea5 service nova] Lock "a05e73dc-3f4f-4743-8eba-057e8ac4b28d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3072.132048] env[61663]: DEBUG nova.compute.manager [req-73b8cdf0-e4f6-4794-9992-97aa8911179d req-5f5ccefd-b822-4125-b3e4-57bb35b8bea5 service nova] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] No waiting events found dispatching network-vif-plugged-3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4 {{(pid=61663) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 3072.132456] env[61663]: WARNING nova.compute.manager [req-73b8cdf0-e4f6-4794-9992-97aa8911179d req-5f5ccefd-b822-4125-b3e4-57bb35b8bea5 service nova] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Received unexpected event network-vif-plugged-3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4 for instance with vm_state building and task_state spawning. 
[ 3072.294163] env[61663]: DEBUG nova.network.neutron [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Successfully updated port: 3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4 {{(pid=61663) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 3072.309712] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "refresh_cache-a05e73dc-3f4f-4743-8eba-057e8ac4b28d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3072.309712] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired lock "refresh_cache-a05e73dc-3f4f-4743-8eba-057e8ac4b28d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3072.309712] env[61663]: DEBUG nova.network.neutron [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Building network info cache for instance {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 3072.349009] env[61663]: DEBUG nova.network.neutron [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Instance cache missing network info. {{(pid=61663) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 3072.510046] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106762} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3072.510046] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3072.510046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/28041873-24f7-4d77-bed8-a078ea1c7b7f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3072.510046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7e4d7716-8ed2-43e1-a478-882377304e95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3072.510046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7e4d7716-8ed2-43e1-a478-882377304e95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3072.510046] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7e4d7716-8ed2-43e1-a478-882377304e95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3072.510046] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f60e0c60-8083-4f8a-abf9-81a3a393c105 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3072.516643] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3072.516643] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526dbdc8-e243-852c-bb78-eea23b55df78" [ 3072.516643] env[61663]: _type = "Task" [ 3072.516643] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3072.524523] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526dbdc8-e243-852c-bb78-eea23b55df78, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3072.580301] env[61663]: DEBUG nova.network.neutron [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Updating instance_info_cache with network_info: [{"id": "3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4", "address": "fa:16:3e:8d:5f:bf", "network": {"id": "bd3a627c-746f-4e8f-8223-f0f3b30c965b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-924597021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b90f6021c544484902ae30054503895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a1ef3a0-08", "ovs_interfaceid": "3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3072.597310] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Releasing lock "refresh_cache-a05e73dc-3f4f-4743-8eba-057e8ac4b28d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3072.597851] env[61663]: DEBUG nova.compute.manager [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Instance network_info: |[{"id": "3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4", "address": "fa:16:3e:8d:5f:bf", "network": {"id": "bd3a627c-746f-4e8f-8223-f0f3b30c965b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-924597021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b90f6021c544484902ae30054503895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a1ef3a0-08", "ovs_interfaceid": "3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61663) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 3072.598689] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:5f:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c797172-a569-458e-aeb0-3f21e589a740', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4', 'vif_model': 'vmxnet3'}] {{(pid=61663) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 3072.607026] env[61663]: DEBUG oslo.service.loopingcall [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3072.607026] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Creating VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 3072.607026] env[61663]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-029269fd-89a8-4b14-898c-fd9c3a4e17ae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3072.627300] env[61663]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 3072.627300] env[61663]: value = "task-1690957" [ 3072.627300] env[61663]: _type = "Task" [ 3072.627300] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3072.635470] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690957, 'name': CreateVM_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3073.029095] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526dbdc8-e243-852c-bb78-eea23b55df78, 'name': SearchDatastore_Task, 'duration_secs': 0.008827} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3073.029095] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/7e4d7716-8ed2-43e1-a478-882377304e95/ts-2024-12-01-04-39-58 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3073.029095] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3ba68dc-89e6-4994-b9f0-9420d16c0455 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3073.041738] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/7e4d7716-8ed2-43e1-a478-882377304e95/ts-2024-12-01-04-39-58 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3073.042110] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 7e4d7716-8ed2-43e1-a478-882377304e95 is no longer used by this node. Pending deletion! [ 3073.042474] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7e4d7716-8ed2-43e1-a478-882377304e95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3073.044079] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7c0b3e70-5839-4e9c-b269-c8ce8fae9c76" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3073.044079] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7c0b3e70-5839-4e9c-b269-c8ce8fae9c76" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3073.044079] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7c0b3e70-5839-4e9c-b269-c8ce8fae9c76" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3073.044079] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-596a09b5-e147-4b58-a20d-2c6af85d0ee1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3073.047719] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3073.047719] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b1f6f1-4374-5702-def1-a2a40d64fd7b" [ 3073.047719] env[61663]: _type = "Task" [ 3073.047719] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3073.056158] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b1f6f1-4374-5702-def1-a2a40d64fd7b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3073.138354] env[61663]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690957, 'name': CreateVM_Task, 'duration_secs': 0.298941} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3073.138717] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Created VM on the ESX host {{(pid=61663) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 3073.139509] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3073.139843] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3073.140345] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3073.140710] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a38f66a0-6e4b-4564-8f7d-993838ad3f86 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3073.144953] env[61663]: DEBUG oslo_vmware.api [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Waiting for the task: (returnval){ [ 3073.144953] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dacb62-cea7-cd60-c6f1-da9fff58f354" [ 3073.144953] env[61663]: _type = "Task" [ 3073.144953] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3073.155829] env[61663]: DEBUG oslo_vmware.api [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dacb62-cea7-cd60-c6f1-da9fff58f354, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3073.562065] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b1f6f1-4374-5702-def1-a2a40d64fd7b, 'name': SearchDatastore_Task, 'duration_secs': 0.009046} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3073.562065] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/7c0b3e70-5839-4e9c-b269-c8ce8fae9c76 is no longer used. Deleting! [ 3073.562065] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/7c0b3e70-5839-4e9c-b269-c8ce8fae9c76 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3073.562065] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f083b208-debc-4549-b647-a0b56942c173 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3073.567222] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3073.567222] env[61663]: value = "task-1690958" [ 3073.567222] env[61663]: _type = "Task" [ 3073.567222] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3073.574498] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690958, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3073.655193] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3073.656025] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Processing image 362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 3073.656025] env[61663]: DEBUG oslo_concurrency.lockutils [None req-b119af8f-b8ca-444e-a157-4740fa44f22c tempest-DeleteServersTestJSON-2108010812 tempest-DeleteServersTestJSON-2108010812-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3074.079498] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690958, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136772} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3074.079498] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3074.079498] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7c0b3e70-5839-4e9c-b269-c8ce8fae9c76" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3074.079498] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/5217f1ee-2c42-43db-89e1-044912eb203c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3074.079498] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/5217f1ee-2c42-43db-89e1-044912eb203c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3074.079498] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5217f1ee-2c42-43db-89e1-044912eb203c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3074.079498] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5435b6ff-087f-4595-95f9-08b560c2e4c5 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3074.084018] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3074.084018] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f0e566-9ddd-1638-d6be-083f37be20b2" [ 3074.084018] env[61663]: _type = "Task" [ 3074.084018] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3074.089855] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f0e566-9ddd-1638-d6be-083f37be20b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3074.167162] env[61663]: DEBUG nova.compute.manager [req-d011642d-4ecd-4989-80e5-7ef1fc7b9555 req-ce77955f-adff-4e64-ad16-ab53b47d2f69 service nova] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Received event network-changed-3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4 {{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 3074.167162] env[61663]: DEBUG nova.compute.manager [req-d011642d-4ecd-4989-80e5-7ef1fc7b9555 req-ce77955f-adff-4e64-ad16-ab53b47d2f69 service nova] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Refreshing instance network info cache due to event network-changed-3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4. 
{{(pid=61663) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 3074.167162] env[61663]: DEBUG oslo_concurrency.lockutils [req-d011642d-4ecd-4989-80e5-7ef1fc7b9555 req-ce77955f-adff-4e64-ad16-ab53b47d2f69 service nova] Acquiring lock "refresh_cache-a05e73dc-3f4f-4743-8eba-057e8ac4b28d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3074.167162] env[61663]: DEBUG oslo_concurrency.lockutils [req-d011642d-4ecd-4989-80e5-7ef1fc7b9555 req-ce77955f-adff-4e64-ad16-ab53b47d2f69 service nova] Acquired lock "refresh_cache-a05e73dc-3f4f-4743-8eba-057e8ac4b28d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3074.167162] env[61663]: DEBUG nova.network.neutron [req-d011642d-4ecd-4989-80e5-7ef1fc7b9555 req-ce77955f-adff-4e64-ad16-ab53b47d2f69 service nova] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Refreshing network info cache for port 3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4 {{(pid=61663) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 3074.462252] env[61663]: DEBUG nova.network.neutron [req-d011642d-4ecd-4989-80e5-7ef1fc7b9555 req-ce77955f-adff-4e64-ad16-ab53b47d2f69 service nova] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Updated VIF entry in instance network info cache for port 3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4. {{(pid=61663) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 3074.462252] env[61663]: DEBUG nova.network.neutron [req-d011642d-4ecd-4989-80e5-7ef1fc7b9555 req-ce77955f-adff-4e64-ad16-ab53b47d2f69 service nova] [instance: a05e73dc-3f4f-4743-8eba-057e8ac4b28d] Updating instance_info_cache with network_info: [{"id": "3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4", "address": "fa:16:3e:8d:5f:bf", "network": {"id": "bd3a627c-746f-4e8f-8223-f0f3b30c965b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-924597021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b90f6021c544484902ae30054503895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a1ef3a0-08", "ovs_interfaceid": "3a1ef3a0-08d8-461f-a3ae-4c5341ff5ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3074.474498] env[61663]: DEBUG oslo_concurrency.lockutils [req-d011642d-4ecd-4989-80e5-7ef1fc7b9555 req-ce77955f-adff-4e64-ad16-ab53b47d2f69 service nova] Releasing lock "refresh_cache-a05e73dc-3f4f-4743-8eba-057e8ac4b28d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3074.594878] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f0e566-9ddd-1638-d6be-083f37be20b2, 'name': 
SearchDatastore_Task, 'duration_secs': 0.009256} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3074.594878] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/5217f1ee-2c42-43db-89e1-044912eb203c is no longer used. Deleting! [ 3074.594878] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/5217f1ee-2c42-43db-89e1-044912eb203c {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3074.594878] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39ba6675-7c09-4e56-b2c0-24f299f0acfa {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3074.599207] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3074.599207] env[61663]: value = "task-1690959" [ 3074.599207] env[61663]: _type = "Task" [ 3074.599207] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3074.607276] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690959, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3075.113021] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120753} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3075.113021] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3075.113021] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/5217f1ee-2c42-43db-89e1-044912eb203c" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3075.113021] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/bf3dc957-65e5-43eb-8a36-e7f2874bb02d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3075.113021] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/bf3dc957-65e5-43eb-8a36-e7f2874bb02d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3075.113021] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/bf3dc957-65e5-43eb-8a36-e7f2874bb02d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3075.113021] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47cf84c9-f16e-425a-a2c3-9caac243aec9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3075.116393] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3075.116393] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52774a75-b54c-d9f0-048f-930834a8e25a" [ 3075.116393] env[61663]: _type = "Task" [ 3075.116393] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3075.124143] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52774a75-b54c-d9f0-048f-930834a8e25a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3075.634021] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52774a75-b54c-d9f0-048f-930834a8e25a, 'name': SearchDatastore_Task, 'duration_secs': 0.009057} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3075.634021] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/bf3dc957-65e5-43eb-8a36-e7f2874bb02d/ts-2024-12-01-04-40-01 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3075.634021] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af9c2540-b9e5-4b6d-8afb-74355980e683 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3075.653638] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/bf3dc957-65e5-43eb-8a36-e7f2874bb02d/ts-2024-12-01-04-40-01 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3075.654160] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image bf3dc957-65e5-43eb-8a36-e7f2874bb02d is no longer used by this node. Pending deletion! [ 3075.654464] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/bf3dc957-65e5-43eb-8a36-e7f2874bb02d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3075.654832] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3075.656024] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3075.656024] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3075.656024] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a75f753f-d9eb-4eaf-b680-67d78a3a8c49 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3075.663177] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3075.663177] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529554fc-ca50-7628-a204-ffa60fd95e04" [ 3075.663177] env[61663]: _type = "Task" [ 3075.663177] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3075.672448] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529554fc-ca50-7628-a204-ffa60fd95e04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3076.171710] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]529554fc-ca50-7628-a204-ffa60fd95e04, 'name': SearchDatastore_Task, 'duration_secs': 0.014817} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3076.172110] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3/ts-2024-12-01-04-40-02 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3076.172545] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bba939b-fe12-43e6-9e70-c16890c65c32 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3076.187159] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3/ts-2024-12-01-04-40-02 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3076.187356] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8b33ecd1-b27f-45ab-a6b6-0568e78559a3 is no longer used by this node. Pending deletion! [ 3076.187550] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3076.187797] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d0c5e98b-e37c-408a-94ba-e852d7be8ab9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3076.187944] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d0c5e98b-e37c-408a-94ba-e852d7be8ab9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3076.188288] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d0c5e98b-e37c-408a-94ba-e852d7be8ab9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3076.188558] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea6788ef-23a7-43f0-bf3e-243bcc99685b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3076.192588] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3076.192588] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523555d0-aa03-9a3f-93ca-1225a2602df8" [ 3076.192588] env[61663]: _type = "Task" [ 3076.192588] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3076.199764] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523555d0-aa03-9a3f-93ca-1225a2602df8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3076.704456] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]523555d0-aa03-9a3f-93ca-1225a2602df8, 'name': SearchDatastore_Task, 'duration_secs': 0.011933} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3076.704456] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d0c5e98b-e37c-408a-94ba-e852d7be8ab9/ts-2024-12-01-04-40-02 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3076.704790] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-078d12d7-ef21-4e4f-b9a9-b3d6859589e1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3076.718675] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d0c5e98b-e37c-408a-94ba-e852d7be8ab9/ts-2024-12-01-04-40-02 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3076.718863] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d0c5e98b-e37c-408a-94ba-e852d7be8ab9 is no longer used by this node. Pending deletion! 
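For readers tracing the "Waiting for the task ... progress is 0% ... completed successfully" records above: oslo_vmware's wait_for_task() drives a fixed-interval polling loop over a vCenter task handle until it reports success or failure. A minimal, self-contained sketch of that pattern follows; FakeTask is an assumption standing in for the real Task managed object, and the interval/timeout values are illustrative, not the driver's actual defaults.

import time

class FakeTask:
    """Stand-in for a vCenter Task managed object (hypothetical)."""
    def __init__(self, polls_until_done=3):
        self._polls = polls_until_done

    def poll(self):
        # A real driver would read task.info.state / task.info.progress
        # from the vSphere API; the fake task finishes after N polls.
        self._polls -= 1
        if self._polls <= 0:
            return {"state": "success", "progress": 100}
        return {"state": "running", "progress": 0}

def wait_for_task(task, interval=0.5, timeout=30.0):
    """Poll until the task succeeds, fails, or the timeout elapses."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = task.poll()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)  # same shape as the _poll_task loop logged above
    raise TimeoutError("task did not complete in time")

print(wait_for_task(FakeTask()))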
[ 3076.719098] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d0c5e98b-e37c-408a-94ba-e852d7be8ab9" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3076.719365] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/274f8b38-c171-418d-99d0-87909ae4fd43" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3076.719514] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/274f8b38-c171-418d-99d0-87909ae4fd43" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3076.719850] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/274f8b38-c171-418d-99d0-87909ae4fd43" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3076.720142] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bd87cd3-ce53-4c5c-a64c-48c09fb85266 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3076.724377] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3076.724377] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5235a09d-1d31-5e88-bd2e-7067975c9532" [ 3076.724377] env[61663]: _type = "Task" [ 3076.724377] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3076.732687] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5235a09d-1d31-5e88-bd2e-7067975c9532, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3077.234300] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5235a09d-1d31-5e88-bd2e-7067975c9532, 'name': SearchDatastore_Task, 'duration_secs': 0.010982} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3077.234643] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/274f8b38-c171-418d-99d0-87909ae4fd43/ts-2024-12-01-04-40-03 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3077.234931] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b97e2ba-20d9-4316-bb9a-8c9f547cdd54 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3077.248067] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/274f8b38-c171-418d-99d0-87909ae4fd43/ts-2024-12-01-04-40-03 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3077.248213] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 274f8b38-c171-418d-99d0-87909ae4fd43 is no longer used by this node. Pending deletion! [ 3077.248377] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/274f8b38-c171-418d-99d0-87909ae4fd43" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3077.248586] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6f096335-3f4b-4b38-a0c6-755cf12cfe92" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3077.248705] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6f096335-3f4b-4b38-a0c6-755cf12cfe92" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3077.249036] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6f096335-3f4b-4b38-a0c6-755cf12cfe92" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3077.249268] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c22c510-efac-4671-8e91-08d80fa8531a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3077.253251] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3077.253251] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a068bc-f3e3-061b-891c-64cbd94500be" [ 3077.253251] env[61663]: _type = "Task" [ 3077.253251] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3077.260238] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a068bc-f3e3-061b-891c-64cbd94500be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3077.763566] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a068bc-f3e3-061b-891c-64cbd94500be, 'name': SearchDatastore_Task, 'duration_secs': 0.008976} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3077.764114] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/6f096335-3f4b-4b38-a0c6-755cf12cfe92/ts-2024-12-01-04-40-03 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3077.764114] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e15b9ed8-3226-443a-a42d-d33846ecad06 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3077.776616] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/6f096335-3f4b-4b38-a0c6-755cf12cfe92/ts-2024-12-01-04-40-03 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3077.776745] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 6f096335-3f4b-4b38-a0c6-755cf12cfe92 is no longer used by this node. Pending deletion! [ 3077.777045] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6f096335-3f4b-4b38-a0c6-755cf12cfe92" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3077.777288] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d4ce843e-84da-4727-8ab4-f2ad23a9a045" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3077.777409] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d4ce843e-84da-4727-8ab4-f2ad23a9a045" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3077.777734] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d4ce843e-84da-4727-8ab4-f2ad23a9a045" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3077.777986] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08c1b787-9999-4688-96cc-42f4a01f2f31 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3077.782221] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3077.782221] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b7697a-547d-fe56-b7fd-ed7424d6817a" [ 3077.782221] env[61663]: _type = "Task" [ 3077.782221] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3077.789939] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b7697a-547d-fe56-b7fd-ed7424d6817a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3078.295130] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b7697a-547d-fe56-b7fd-ed7424d6817a, 'name': SearchDatastore_Task, 'duration_secs': 0.008202} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3078.295130] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d4ce843e-84da-4727-8ab4-f2ad23a9a045/ts-2024-12-01-04-40-04 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3078.295130] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3f8b3f29-aa90-4ee3-99ad-4f0edf911ec4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3078.306050] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d4ce843e-84da-4727-8ab4-f2ad23a9a045/ts-2024-12-01-04-40-04 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3078.306203] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d4ce843e-84da-4727-8ab4-f2ad23a9a045 is no longer used by this node. Pending deletion! 
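The Acquiring lock / Acquired lock / Acquired external semaphore / Releasing lock quartets that bracket every cache entry above come from oslo.concurrency: each image folder is guarded by a named lock so only one worker inspects, marks, or deletes a given entry at a time. A sketch of that usage, assuming oslo.concurrency is installed; the lock-name format is copied from the log, while the function and its arguments are hypothetical:

from oslo_concurrency import lockutils

def process_cache_entry(datastore, image_id):
    lock_name = f"[{datastore}] devstack-image-cache_base/{image_id}"
    # Passing external=True (with a configured lock_path) would add the
    # file-based, cross-process lock behind the "Acquired external
    # semaphore" records; only the in-process lock is shown here.
    with lockutils.lock(lock_name):
        print(f"holding {lock_name}")  # search/mark/delete would go here

process_cache_entry("datastore2", "8b33ecd1-b27f-45ab-a6b6-0568e78559a3")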
[ 3078.306357] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d4ce843e-84da-4727-8ab4-f2ad23a9a045" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3078.306573] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/5f4d96de-0292-4ce8-9f0c-6409be8c5a51" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3078.306686] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/5f4d96de-0292-4ce8-9f0c-6409be8c5a51" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3078.307192] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5f4d96de-0292-4ce8-9f0c-6409be8c5a51" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3078.307465] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d8acd28-af42-44e0-9852-10414d9fc92c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3078.312353] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3078.312353] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527d774b-18f6-4d4b-7a6c-6aa424ee53f3" [ 3078.312353] env[61663]: _type = "Task" [ 3078.312353] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3078.323305] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527d774b-18f6-4d4b-7a6c-6aa424ee53f3, 'name': SearchDatastore_Task, 'duration_secs': 0.008304} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3078.323509] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/5f4d96de-0292-4ce8-9f0c-6409be8c5a51/ts-2024-12-01-04-40-04 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3078.323749] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-998c42fd-db3a-4923-bcb3-6abda4c275e7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3078.334514] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/5f4d96de-0292-4ce8-9f0c-6409be8c5a51/ts-2024-12-01-04-40-04 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3078.334644] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 5f4d96de-0292-4ce8-9f0c-6409be8c5a51 is no longer used by this node. Pending deletion! 
[ 3078.334804] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/5f4d96de-0292-4ce8-9f0c-6409be8c5a51" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3078.335020] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/5a979cc2-3736-4678-832c-a14533d41878" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3078.335146] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/5a979cc2-3736-4678-832c-a14533d41878" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3078.335581] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5a979cc2-3736-4678-832c-a14533d41878" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3078.335697] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-051be885-f636-4704-abf7-2a177b1b31c7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3078.339781] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3078.339781] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5212f7f1-b37c-8603-b91b-3bd80a926e3c" [ 3078.339781] env[61663]: _type = "Task" [ 3078.339781] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3078.348462] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5212f7f1-b37c-8603-b91b-3bd80a926e3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3078.850961] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5212f7f1-b37c-8603-b91b-3bd80a926e3c, 'name': SearchDatastore_Task, 'duration_secs': 0.008425} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3078.851546] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/5a979cc2-3736-4678-832c-a14533d41878/ts-2024-12-01-04-40-04 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3078.851546] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f45a872-b7df-45b3-8fe9-0221164c8354 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3078.864925] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/5a979cc2-3736-4678-832c-a14533d41878/ts-2024-12-01-04-40-04 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3078.865092] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 5a979cc2-3736-4678-832c-a14533d41878 is no longer used by this node. Pending deletion! [ 3078.865257] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/5a979cc2-3736-4678-832c-a14533d41878" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3078.865475] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/324925c5-85b7-4839-886c-fbbd700962db" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3078.865592] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/324925c5-85b7-4839-886c-fbbd700962db" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3078.865903] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/324925c5-85b7-4839-886c-fbbd700962db" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3078.866165] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b91832ac-1725-4b84-a83f-1023e6cb3db3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3078.870520] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3078.870520] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b5733e-ad1e-b2d4-a064-0e6f7f6bb1e3" [ 3078.870520] env[61663]: _type = "Task" [ 3078.870520] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3078.877834] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b5733e-ad1e-b2d4-a064-0e6f7f6bb1e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3079.048270] env[61663]: DEBUG oslo_concurrency.lockutils [None req-4143056f-706c-481f-a2b8-b1f9e7a111d7 tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquiring lock "b84c5391-c337-4e45-823d-5779df22a116" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3079.381074] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b5733e-ad1e-b2d4-a064-0e6f7f6bb1e3, 'name': SearchDatastore_Task, 'duration_secs': 0.008149} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3079.381354] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/324925c5-85b7-4839-886c-fbbd700962db/ts-2024-12-01-04-40-05 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3079.381625] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1a47d06-1268-434c-a554-ae6c666fd16f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3079.393192] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/324925c5-85b7-4839-886c-fbbd700962db/ts-2024-12-01-04-40-05 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3079.393331] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 324925c5-85b7-4839-886c-fbbd700962db is no longer used by this node. Pending deletion! 
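The ts-2024-12-01-04-40-0x directories being created above are age markers: an unused image is not removed on first sight ("Pending deletion!"); a timestamped folder is dropped beside it instead, and a later pass deletes the entry once the marker has outlived the configured grace period (the "is no longer used. Deleting!" records). A local-filesystem sketch of that two-phase mechanism; the real driver issues FileManager.MakeDirectory and DeleteDatastoreFile_Task against the datastore instead, and the grace period here is an arbitrary example:

import shutil
from datetime import datetime, timedelta, timezone
from pathlib import Path

TS_FORMAT = "ts-%Y-%m-%d-%H-%M-%S"  # matches the marker names in the log

def mark_unused(image_dir: Path) -> None:
    """First pass: drop an age marker inside an unused cache entry."""
    marker = image_dir / datetime.now(timezone.utc).strftime(TS_FORMAT)
    marker.mkdir(parents=True, exist_ok=True)

def sweep(cache_root: Path, max_age: timedelta) -> None:
    """Later pass: delete entries whose newest marker has aged out."""
    now = datetime.now(timezone.utc)
    for image_dir in cache_root.iterdir():
        markers = sorted(image_dir.glob("ts-*"))  # names sort chronologically
        if not markers:
            continue  # entry still in use, or not yet marked
        stamp = datetime.strptime(markers[-1].name, TS_FORMAT)
        if now - stamp.replace(tzinfo=timezone.utc) > max_age:
            shutil.rmtree(image_dir)  # reclaim the cached image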
[ 3079.393464] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/324925c5-85b7-4839-886c-fbbd700962db" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3079.393693] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3079.393810] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3079.394137] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3079.394368] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70ecc77f-5f93-40cf-8954-5a8e733f7d65 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3079.398591] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3079.398591] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d6dbce-3ce7-05b2-8f4b-8e5c4467709b" [ 3079.398591] env[61663]: _type = "Task" [ 3079.398591] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3079.408223] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d6dbce-3ce7-05b2-8f4b-8e5c4467709b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3079.908970] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d6dbce-3ce7-05b2-8f4b-8e5c4467709b, 'name': SearchDatastore_Task, 'duration_secs': 0.009087} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3079.909452] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5/ts-2024-12-01-04-40-05 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3079.909527] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf659b86-25ce-4fcd-bf2f-fd333fdbee05 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3079.921018] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5/ts-2024-12-01-04-40-05 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3079.921192] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 39c12972-ee18-4321-a1aa-7b6266024ca5 is no longer used by this node. Pending deletion! [ 3079.921321] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/39c12972-ee18-4321-a1aa-7b6266024ca5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3079.921535] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/f4ffac9c-0cea-4256-b264-b625ad42f8d0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3079.921652] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/f4ffac9c-0cea-4256-b264-b625ad42f8d0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3079.921961] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/f4ffac9c-0cea-4256-b264-b625ad42f8d0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3079.922227] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ec876cc-a94e-4041-a95a-27e0a53f56a7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3079.926056] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3079.926056] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527f23a3-ebce-0725-3b70-7332ce13d4b3" [ 3079.926056] env[61663]: _type = "Task" [ 3079.926056] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3079.933303] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527f23a3-ebce-0725-3b70-7332ce13d4b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3080.436027] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527f23a3-ebce-0725-3b70-7332ce13d4b3, 'name': SearchDatastore_Task, 'duration_secs': 0.007988} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3080.436310] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/f4ffac9c-0cea-4256-b264-b625ad42f8d0/ts-2024-12-01-04-40-06 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3080.436567] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78c1bef9-8403-4277-9a85-339a25b98022 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3080.448398] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/f4ffac9c-0cea-4256-b264-b625ad42f8d0/ts-2024-12-01-04-40-06 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3080.448542] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image f4ffac9c-0cea-4256-b264-b625ad42f8d0 is no longer used by this node. Pending deletion! [ 3080.448702] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/f4ffac9c-0cea-4256-b264-b625ad42f8d0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3080.448912] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7a03b57e-1997-4cbf-a1a6-49663b00ea16" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3080.449085] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7a03b57e-1997-4cbf-a1a6-49663b00ea16" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3080.449404] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7a03b57e-1997-4cbf-a1a6-49663b00ea16" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3080.449642] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebd6d7f9-0455-4d63-96e6-ce25107d80c1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3080.453723] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3080.453723] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525f9e77-a38c-690e-f35d-bdc5f6de0e5c" [ 3080.453723] env[61663]: _type = "Task" [ 3080.453723] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3080.460892] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525f9e77-a38c-690e-f35d-bdc5f6de0e5c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3080.964079] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525f9e77-a38c-690e-f35d-bdc5f6de0e5c, 'name': SearchDatastore_Task, 'duration_secs': 0.007845} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3080.964713] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/7a03b57e-1997-4cbf-a1a6-49663b00ea16/ts-2024-12-01-04-40-06 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3080.964713] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-425a4fa7-2793-49dd-b654-f009d5f10aa8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3080.976366] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/7a03b57e-1997-4cbf-a1a6-49663b00ea16/ts-2024-12-01-04-40-06 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3080.976569] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 7a03b57e-1997-4cbf-a1a6-49663b00ea16 is no longer used by this node. Pending deletion! 
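Stepping back, each block of records above is one iteration of the same per-entry decision: entries still referenced by an instance are left alone, fresh unused entries get marked, and entries whose marker has aged out get deleted. A condensed sketch of that control flow; every name is hypothetical, though in nova the equivalent logic lives in the image cache manager's periodic task:

def run_image_cache_pass(cached, referenced, is_aged, mark_unused, delete_entry):
    for image_id in sorted(cached):
        if image_id in referenced:
            continue                   # still used by some instance
        if is_aged(image_id):
            delete_entry(image_id)     # "is no longer used. Deleting!"
        else:
            mark_unused(image_id)      # "Pending deletion!"

run_image_cache_pass(
    cached={"img-a", "img-b", "img-c"},
    referenced={"img-a"},
    is_aged=lambda i: i == "img-b",
    mark_unused=lambda i: print("mark", i),
    delete_entry=lambda i: print("delete", i),
)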
[ 3080.976661] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7a03b57e-1997-4cbf-a1a6-49663b00ea16" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3080.976876] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/81ea470f-b747-46e9-8175-4c3995b328cc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3080.977011] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/81ea470f-b747-46e9-8175-4c3995b328cc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3080.977465] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/81ea470f-b747-46e9-8175-4c3995b328cc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3080.977725] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bba0f415-9757-413b-b656-65d474580b2c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3080.981946] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3080.981946] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a35131-86d7-a38c-fd60-342b19269dc6" [ 3080.981946] env[61663]: _type = "Task" [ 3080.981946] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3080.989222] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a35131-86d7-a38c-fd60-342b19269dc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3081.493192] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a35131-86d7-a38c-fd60-342b19269dc6, 'name': SearchDatastore_Task, 'duration_secs': 0.007681} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3081.493453] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/81ea470f-b747-46e9-8175-4c3995b328cc/ts-2024-12-01-04-40-07 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3081.493705] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e601f8a9-159b-4fd6-9e63-e01a1fb0dd21 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3081.505452] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/81ea470f-b747-46e9-8175-4c3995b328cc/ts-2024-12-01-04-40-07 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3081.505612] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 81ea470f-b747-46e9-8175-4c3995b328cc is no longer used by this node. Pending deletion! [ 3081.505746] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/81ea470f-b747-46e9-8175-4c3995b328cc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3081.505949] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/1c88f672-e231-4c87-ad1f-4024af2f8646" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3081.506079] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/1c88f672-e231-4c87-ad1f-4024af2f8646" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3081.506389] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1c88f672-e231-4c87-ad1f-4024af2f8646" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3081.506617] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49ff1290-695c-494c-a530-196a97962f50 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3081.510674] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3081.510674] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52eae45f-5d73-ce32-958b-de180f69fa4d" [ 3081.510674] env[61663]: _type = "Task" [ 3081.510674] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3081.518208] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52eae45f-5d73-ce32-958b-de180f69fa4d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3082.021349] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52eae45f-5d73-ce32-958b-de180f69fa4d, 'name': SearchDatastore_Task, 'duration_secs': 0.008049} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3082.021739] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/1c88f672-e231-4c87-ad1f-4024af2f8646/ts-2024-12-01-04-40-07 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3082.021922] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-514fe4e9-ecdd-40d1-af1a-0f1ff2431403 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3082.033964] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/1c88f672-e231-4c87-ad1f-4024af2f8646/ts-2024-12-01-04-40-07 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3082.034136] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 1c88f672-e231-4c87-ad1f-4024af2f8646 is no longer used by this node. Pending deletion! [ 3082.034286] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/1c88f672-e231-4c87-ad1f-4024af2f8646" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3082.034510] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e61982ff-2eab-4360-af0d-c85cced86448" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3082.034627] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e61982ff-2eab-4360-af0d-c85cced86448" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3082.034951] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e61982ff-2eab-4360-af0d-c85cced86448" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3082.035214] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84519282-2a78-41b5-ae6e-dd87af341cac {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3082.039437] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3082.039437] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5229e14a-816e-0638-1c09-e9fe9df2b166" [ 3082.039437] env[61663]: _type = "Task" [ 3082.039437] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3082.047271] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5229e14a-816e-0638-1c09-e9fe9df2b166, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3082.549739] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5229e14a-816e-0638-1c09-e9fe9df2b166, 'name': SearchDatastore_Task, 'duration_secs': 0.007981} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3082.550104] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e61982ff-2eab-4360-af0d-c85cced86448/ts-2024-12-01-04-40-08 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3082.550386] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4dbc18a1-5d3c-46c6-9e0b-6e14cd39a4e9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3082.562299] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e61982ff-2eab-4360-af0d-c85cced86448/ts-2024-12-01-04-40-08 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3082.562448] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e61982ff-2eab-4360-af0d-c85cced86448 is no longer used by this node. Pending deletion! 
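Note: the records above are one iteration of nova's periodic image-cache aging pass (nova.virt.vmwareapi.imagecache). For each cached image under devstack-image-cache_base the manager takes a per-image lock, runs a HostDatastoreBrowser.SearchDatastore_Task against the cache entry, and, when no instance on this node still uses the image, writes a ts-<timestamp> marker directory next to it ("Pending deletion!") so that a later pass can remove entries that stay unused past the configured minimum age. A minimal sketch of the marker arithmetic follows; the helper names and the 24-hour default are assumptions for illustration, not nova's actual code:

    from datetime import datetime, timedelta

    TS_PREFIX = "ts-"
    TS_FORMAT = "%Y-%m-%d-%H-%M-%S"  # matches ts-2024-12-01-04-40-07 in the log

    def marker_name(now):
        # Name of the "pending deletion" marker directory created via MakeDirectory.
        return TS_PREFIX + now.strftime(TS_FORMAT)

    def marker_expired(name, now, max_age=timedelta(hours=24)):  # max_age is an assumption
        # An entry whose marker has aged past max_age becomes eligible for deletion.
        written = datetime.strptime(name[len(TS_PREFIX):], TS_FORMAT)
        return now - written > max_age

    now = datetime(2024, 12, 1, 4, 40, 7)
    assert marker_name(now) == "ts-2024-12-01-04-40-07"
    assert not marker_expired("ts-2024-12-01-04-40-07", now + timedelta(hours=1))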
[ 3082.562613] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e61982ff-2eab-4360-af0d-c85cced86448" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3082.562826] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/dd704e9b-706f-4be3-9d1a-c230b84f4212" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3082.562943] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/dd704e9b-706f-4be3-9d1a-c230b84f4212" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3082.563266] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/dd704e9b-706f-4be3-9d1a-c230b84f4212" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3082.563511] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c72e2c1-de46-4cb9-92c9-30bcf3abd48c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3082.567912] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3082.567912] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525136b2-c0fc-7714-64e4-7b705a18c019" [ 3082.567912] env[61663]: _type = "Task" [ 3082.567912] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3082.574899] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525136b2-c0fc-7714-64e4-7b705a18c019, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3083.078367] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525136b2-c0fc-7714-64e4-7b705a18c019, 'name': SearchDatastore_Task, 'duration_secs': 0.008123} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3083.078810] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/dd704e9b-706f-4be3-9d1a-c230b84f4212/ts-2024-12-01-04-40-08 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3083.078900] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12aba1c4-4910-4db0-829a-5f90a2386d52 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3083.091941] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/dd704e9b-706f-4be3-9d1a-c230b84f4212/ts-2024-12-01-04-40-08 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3083.092207] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image dd704e9b-706f-4be3-9d1a-c230b84f4212 is no longer used by this node. Pending deletion! [ 3083.092305] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/dd704e9b-706f-4be3-9d1a-c230b84f4212" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3083.092544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2def6e64-787e-40e7-8ac0-1ba51bf25021" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3083.092661] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2def6e64-787e-40e7-8ac0-1ba51bf25021" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3083.093011] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2def6e64-787e-40e7-8ac0-1ba51bf25021" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3083.093303] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3761f57-5c3c-4e0f-9345-5edcb68f7672 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3083.097967] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3083.097967] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5253929b-0c33-b925-f0bb-485891cbf5ab" [ 3083.097967] env[61663]: _type = "Task" [ 3083.097967] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3083.106054] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5253929b-0c33-b925-f0bb-485891cbf5ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3083.611876] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5253929b-0c33-b925-f0bb-485891cbf5ab, 'name': SearchDatastore_Task, 'duration_secs': 0.008771} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3083.611876] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/2def6e64-787e-40e7-8ac0-1ba51bf25021/ts-2024-12-01-04-40-09 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3083.611876] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12253d01-df9b-45c6-b4a5-76409c4c3d5c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3083.625183] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/2def6e64-787e-40e7-8ac0-1ba51bf25021/ts-2024-12-01-04-40-09 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3083.626812] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 2def6e64-787e-40e7-8ac0-1ba51bf25021 is no longer used by this node. Pending deletion! [ 3083.627011] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2def6e64-787e-40e7-8ac0-1ba51bf25021" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3083.627252] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/6670d9b8-a339-4e0a-82f2-5b10f0a7fa89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3083.627374] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/6670d9b8-a339-4e0a-82f2-5b10f0a7fa89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3083.627694] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/6670d9b8-a339-4e0a-82f2-5b10f0a7fa89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3083.628014] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-662e0595-faf3-42df-af10-ca256818c528 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3083.632872] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3083.632872] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525abe53-a7d6-7ffa-23a3-89f41c60462c" [ 3083.632872] env[61663]: _type = "Task" [ 3083.632872] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3083.641351] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525abe53-a7d6-7ffa-23a3-89f41c60462c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3084.145931] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525abe53-a7d6-7ffa-23a3-89f41c60462c, 'name': SearchDatastore_Task, 'duration_secs': 0.008779} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3084.146556] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/6670d9b8-a339-4e0a-82f2-5b10f0a7fa89/ts-2024-12-01-04-40-09 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3084.146704] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ca4f4a7-ae48-49aa-a6ce-6067606d99dd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3084.160381] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/6670d9b8-a339-4e0a-82f2-5b10f0a7fa89/ts-2024-12-01-04-40-09 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3084.160546] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 6670d9b8-a339-4e0a-82f2-5b10f0a7fa89 is no longer used by this node. Pending deletion! 
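Note: each SearchDatastore_Task above is driven through the oslo_vmware.api wait_for_task/_poll_task pair: the client logs "progress is 0%." on each poll and "completed successfully" with the server-side duration_secs once Task.info reports success. The roughly half-second gap between the poll line and the completion line is consistent with the library's default task poll interval. A generic poll loop in the same spirit, with a hypothetical poll() callable standing in for the real client (which reads Task.info via a looping call):

    import time

    def wait_for_task(poll, interval=0.5):
        # poll() returns (state, result); loop until the task leaves its running states.
        while True:
            state, result = poll()
            if state == "success":
                return result
            if state == "error":
                raise RuntimeError("vCenter task failed")
            time.sleep(interval)  # matches the ~0.5 s cadence visible in the log

    states = iter([("running", None), ("running", None), ("success", "ok")])
    assert wait_for_task(lambda: next(states), interval=0.0) == "ok"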
[ 3084.160711] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/6670d9b8-a339-4e0a-82f2-5b10f0a7fa89" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3084.160931] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/fb0470fa-ad60-478a-90ff-8818b16d702d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3084.161068] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/fb0470fa-ad60-478a-90ff-8818b16d702d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3084.161393] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/fb0470fa-ad60-478a-90ff-8818b16d702d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3084.161683] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adb1f54a-72f5-4141-9fe2-8e75a8fbeb8b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3084.175031] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3084.175031] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c3348e-3533-f181-d508-dbf5a86cbae4" [ 3084.175031] env[61663]: _type = "Task" [ 3084.175031] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3084.180388] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c3348e-3533-f181-d508-dbf5a86cbae4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3084.688076] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c3348e-3533-f181-d508-dbf5a86cbae4, 'name': SearchDatastore_Task, 'duration_secs': 0.009355} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3084.688076] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/fb0470fa-ad60-478a-90ff-8818b16d702d is no longer used. Deleting! 
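Note: this entry takes the other branch of the aging pass. Image fb0470fa-... has evidently stayed unused long enough, so instead of writing another ts- marker the manager deletes the whole cache entry via FileManager.DeleteDatastoreFile_Task (the records that follow). A sketch of the two-way decision, with hypothetical callables standing in for nova's ds_util helpers:

    def age_cache_entry(unused, marker_old, delete_entry, write_marker):
        # unused: SearchDatastore showed no instance references the image.
        # marker_old: a ts- marker exists and has passed the minimum age.
        if not unused:
            return "in use"
        if marker_old:
            delete_entry()   # logged as "... is no longer used. Deleting!"
            return "deleted"
        write_marker()       # logged as "... Pending deletion!"
        return "pending"

    assert age_cache_entry(True, True, lambda: None, lambda: None) == "deleted"
    assert age_cache_entry(True, False, lambda: None, lambda: None) == "pending"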
[ 3084.688076] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/fb0470fa-ad60-478a-90ff-8818b16d702d {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3084.688076] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a584fbba-abaf-4fb6-9352-4dd490e2637b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3084.692687] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3084.692687] env[61663]: value = "task-1690960" [ 3084.692687] env[61663]: _type = "Task" [ 3084.692687] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3084.701163] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690960, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3085.203725] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115657} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3085.204146] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3085.204146] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/fb0470fa-ad60-478a-90ff-8818b16d702d" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3085.204399] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/074d14cf-4e54-4001-8ed9-4aebcce1fc56" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3085.204485] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/074d14cf-4e54-4001-8ed9-4aebcce1fc56" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3085.204786] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/074d14cf-4e54-4001-8ed9-4aebcce1fc56" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3085.205062] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-833eb1e1-a645-4639-a6c7-f962e4900c81 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3085.209582] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3085.209582] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52af433a-4617-2820-595f-a8a4f5ee66de" [ 3085.209582] env[61663]: _type = "Task" [ 3085.209582] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 3085.217512] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52af433a-4617-2820-595f-a8a4f5ee66de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3085.720870] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52af433a-4617-2820-595f-a8a4f5ee66de, 'name': SearchDatastore_Task, 'duration_secs': 0.009403} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3085.721186] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/074d14cf-4e54-4001-8ed9-4aebcce1fc56 is no longer used. Deleting! [ 3085.721331] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/074d14cf-4e54-4001-8ed9-4aebcce1fc56 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3085.721607] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81d49e2c-2e7e-4dc3-914e-3082727ce020 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3085.730098] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3085.730098] env[61663]: value = "task-1690961" [ 3085.730098] env[61663]: _type = "Task" [ 3085.730098] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3085.740987] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690961, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3086.239761] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690961, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110671} completed successfully.
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3086.240282] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3086.240282] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/074d14cf-4e54-4001-8ed9-4aebcce1fc56" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3086.240428] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e379829b-719e-4cfd-9545-f99e87d7b704" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3086.240520] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e379829b-719e-4cfd-9545-f99e87d7b704" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3086.240839] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e379829b-719e-4cfd-9545-f99e87d7b704" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3086.241106] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41756054-fd60-4ca8-b79f-5a6799f588b4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3086.245166] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3086.245166] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528295ab-80f8-b26b-b606-b4623f3e4381" [ 3086.245166] env[61663]: _type = "Task" [ 3086.245166] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3086.252258] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528295ab-80f8-b26b-b606-b4623f3e4381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3086.755797] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528295ab-80f8-b26b-b606-b4623f3e4381, 'name': SearchDatastore_Task, 'duration_secs': 0.00943} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3086.756093] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e379829b-719e-4cfd-9545-f99e87d7b704/ts-2024-12-01-04-40-12 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3086.756361] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5430931f-02bf-493c-8aa1-bbab20b89f2b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3086.772203] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e379829b-719e-4cfd-9545-f99e87d7b704/ts-2024-12-01-04-40-12 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3086.772363] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e379829b-719e-4cfd-9545-f99e87d7b704 is no longer used by this node. Pending deletion! [ 3086.772523] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e379829b-719e-4cfd-9545-f99e87d7b704" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3086.772743] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3086.772863] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3086.773194] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3086.773465] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c65d0d7-2790-4b84-acaa-a87f6370f2da {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3086.782252] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3086.782252] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5236a32d-a9ab-1b48-1e90-fa6b42238b49" [ 3086.782252] env[61663]: _type = "Task" [ 3086.782252] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3086.792097] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5236a32d-a9ab-1b48-1e90-fa6b42238b49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3087.293105] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5236a32d-a9ab-1b48-1e90-fa6b42238b49, 'name': SearchDatastore_Task, 'duration_secs': 0.008912} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3087.293422] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd/ts-2024-12-01-04-40-13 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3087.293707] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35f9b436-fe27-40a1-9ab1-252160cfc8cd {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3087.305695] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd/ts-2024-12-01-04-40-13 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3087.305864] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image d31f4473-c51c-4050-a140-362e830b0dbd is no longer used by this node. Pending deletion! [ 3087.305994] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d31f4473-c51c-4050-a140-362e830b0dbd" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3087.306227] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/aecb2e60-7e62-4017-92e3-975491a9e019" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3087.306347] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/aecb2e60-7e62-4017-92e3-975491a9e019" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3087.306667] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/aecb2e60-7e62-4017-92e3-975491a9e019" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3087.306911] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5cfb84f-016a-4b1d-98b6-5fdff425957c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3087.311374] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3087.311374] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b87a78-6add-1ce4-0ef9-197440d39141" [ 3087.311374] env[61663]: _type = "Task" [ 3087.311374] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3087.318862] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b87a78-6add-1ce4-0ef9-197440d39141, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3087.822436] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b87a78-6add-1ce4-0ef9-197440d39141, 'name': SearchDatastore_Task, 'duration_secs': 0.008308} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3087.822608] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/aecb2e60-7e62-4017-92e3-975491a9e019/ts-2024-12-01-04-40-13 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3087.822755] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1d08af3-3e1c-4f89-a571-06348ee01e2c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3087.834556] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/aecb2e60-7e62-4017-92e3-975491a9e019/ts-2024-12-01-04-40-13 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3087.834715] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image aecb2e60-7e62-4017-92e3-975491a9e019 is no longer used by this node. Pending deletion! 
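Note: every image is processed inside its own critical section. The Acquiring/Acquired/Releasing lines (lockutils.py:310/313/321/331) bracket exactly one SearchDatastore round trip, and the lock name is the image's datastore path, so two workers can never age the same cache entry at once; the "Acquired external semaphore" line indicates a cross-process lock is taken as well. A minimal sketch using oslo.concurrency's public context manager; passing external=True is an assumption about how nova obtains the semaphore line, not a confirmed reading of its code:

    from oslo_concurrency import lockutils

    def with_image_lock(image_ds_path, fn):
        # One lock per cached image; external=True also takes a file-backed
        # semaphore so separate processes serialize on the same entry.
        with lockutils.lock(image_ds_path, external=True):
            return fn()

    # e.g. with_image_lock('[datastore2] devstack-image-cache_base/<uuid>', check_entry)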
[ 3087.834858] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/aecb2e60-7e62-4017-92e3-975491a9e019" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3087.835384] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/04f2daf6-13d7-4152-aa5a-1f38ca482095" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3087.835522] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/04f2daf6-13d7-4152-aa5a-1f38ca482095" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3087.835818] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/04f2daf6-13d7-4152-aa5a-1f38ca482095" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3087.836080] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4236dd2a-db73-4238-ab02-84b4f35c2e7c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3087.840210] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3087.840210] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f33db7-78f4-5677-b498-316d27035f88" [ 3087.840210] env[61663]: _type = "Task" [ 3087.840210] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3087.847812] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f33db7-78f4-5677-b498-316d27035f88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3088.351275] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f33db7-78f4-5677-b498-316d27035f88, 'name': SearchDatastore_Task, 'duration_secs': 0.007683} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3088.351614] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/04f2daf6-13d7-4152-aa5a-1f38ca482095/ts-2024-12-01-04-40-14 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3088.351762] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07f8ad07-a9af-4513-ab59-0f078da17764 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3088.363594] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/04f2daf6-13d7-4152-aa5a-1f38ca482095/ts-2024-12-01-04-40-14 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3088.363790] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 04f2daf6-13d7-4152-aa5a-1f38ca482095 is no longer used by this node. Pending deletion! [ 3088.363891] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/04f2daf6-13d7-4152-aa5a-1f38ca482095" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3088.364117] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/460d788b-bffd-4261-ab5c-48cf6c53814f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3088.364242] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/460d788b-bffd-4261-ab5c-48cf6c53814f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3088.364561] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/460d788b-bffd-4261-ab5c-48cf6c53814f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3088.364783] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98301b63-adf0-465b-a138-459dd43c735e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3088.368784] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3088.368784] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52762d49-aa5a-21bf-a19c-608ad6d36d8b" [ 3088.368784] env[61663]: _type = "Task" [ 3088.368784] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3088.376164] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52762d49-aa5a-21bf-a19c-608ad6d36d8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3088.880147] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52762d49-aa5a-21bf-a19c-608ad6d36d8b, 'name': SearchDatastore_Task, 'duration_secs': 0.007753} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3088.880471] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/460d788b-bffd-4261-ab5c-48cf6c53814f/ts-2024-12-01-04-40-14 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3088.880744] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-92771806-b9f3-43f2-8e2b-3d7b5752cd87 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3088.893870] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/460d788b-bffd-4261-ab5c-48cf6c53814f/ts-2024-12-01-04-40-14 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3088.894018] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 460d788b-bffd-4261-ab5c-48cf6c53814f is no longer used by this node. Pending deletion! [ 3088.894187] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/460d788b-bffd-4261-ab5c-48cf6c53814f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3088.894407] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/99a1286c-8ab6-4cf9-804b-c383ee3dc416" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3088.894526] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/99a1286c-8ab6-4cf9-804b-c383ee3dc416" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3088.894846] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/99a1286c-8ab6-4cf9-804b-c383ee3dc416" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3088.895102] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5872a09-5b0f-4e17-89db-7e5d25b88ec3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3088.899529] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3088.899529] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522f5cd2-2656-98d6-7157-63a64a2c4272" [ 3088.899529] env[61663]: _type = "Task" [ 3088.899529] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3088.906933] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522f5cd2-2656-98d6-7157-63a64a2c4272, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3089.411056] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522f5cd2-2656-98d6-7157-63a64a2c4272, 'name': SearchDatastore_Task, 'duration_secs': 0.007801} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3089.411438] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/99a1286c-8ab6-4cf9-804b-c383ee3dc416/ts-2024-12-01-04-40-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3089.411438] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bae4129a-3cfb-43ff-85bd-3415176345a2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3089.423171] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/99a1286c-8ab6-4cf9-804b-c383ee3dc416/ts-2024-12-01-04-40-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3089.423313] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 99a1286c-8ab6-4cf9-804b-c383ee3dc416 is no longer used by this node. Pending deletion! 
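Note: all paths in this pass use vSphere's "[datastore] folder/subfolder" notation. Nova wraps this in a datastore-path helper, but the encoding itself is simple enough to show directly; a self-contained sketch with hypothetical function names:

    def build_ds_path(datastore, *parts):
        # Compose a '[datastore] dir/sub' path like the ones in the log.
        return "[%s] %s" % (datastore, "/".join(parts))

    def split_ds_path(ds_path):
        # Inverse: '[datastore2] devstack-image-cache_base/<uuid>' -> (ds, relpath).
        ds, _, rel = ds_path.partition("] ")
        return ds.lstrip("["), rel

    ds_path = build_ds_path("datastore2", "devstack-image-cache_base",
                            "99a1286c-8ab6-4cf9-804b-c383ee3dc416")
    assert split_ds_path(ds_path) == (
        "datastore2",
        "devstack-image-cache_base/99a1286c-8ab6-4cf9-804b-c383ee3dc416")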
[ 3089.423472] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/99a1286c-8ab6-4cf9-804b-c383ee3dc416" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3089.423772] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/b81bba2e-eb11-45af-9ce0-dad7243cba95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3089.423897] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/b81bba2e-eb11-45af-9ce0-dad7243cba95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3089.424219] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b81bba2e-eb11-45af-9ce0-dad7243cba95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3089.424442] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddc547c1-cd94-4459-b870-de38ad7399ad {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3089.428577] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3089.428577] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ed1b29-0b61-cde6-ba86-bf1e2e10fba5" [ 3089.428577] env[61663]: _type = "Task" [ 3089.428577] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3089.435739] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ed1b29-0b61-cde6-ba86-bf1e2e10fba5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3089.940176] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52ed1b29-0b61-cde6-ba86-bf1e2e10fba5, 'name': SearchDatastore_Task, 'duration_secs': 0.007838} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3089.940176] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/b81bba2e-eb11-45af-9ce0-dad7243cba95/ts-2024-12-01-04-40-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3089.940176] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff726988-d13f-484d-8565-e6e031afaed1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3089.951303] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/b81bba2e-eb11-45af-9ce0-dad7243cba95/ts-2024-12-01-04-40-15 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3089.951448] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image b81bba2e-eb11-45af-9ce0-dad7243cba95 is no longer used by this node. Pending deletion! [ 3089.951626] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/b81bba2e-eb11-45af-9ce0-dad7243cba95" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3089.951853] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/54e58a8f-a716-490a-9811-f9404590d034" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3089.951971] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/54e58a8f-a716-490a-9811-f9404590d034" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3089.952305] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/54e58a8f-a716-490a-9811-f9404590d034" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3089.952542] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fda03cf-c2db-432a-9aae-61cea6bd6043 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3089.956982] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3089.956982] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52095a37-3b92-85a2-4f98-6f8621e54923" [ 3089.956982] env[61663]: _type = "Task" [ 3089.956982] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3089.964436] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52095a37-3b92-85a2-4f98-6f8621e54923, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3090.467713] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52095a37-3b92-85a2-4f98-6f8621e54923, 'name': SearchDatastore_Task, 'duration_secs': 0.007886} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3090.468133] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/54e58a8f-a716-490a-9811-f9404590d034/ts-2024-12-01-04-40-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3090.468466] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b517c367-ff04-46af-8ab2-2ec8ed5d74e7 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3090.480514] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/54e58a8f-a716-490a-9811-f9404590d034/ts-2024-12-01-04-40-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3090.480664] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 54e58a8f-a716-490a-9811-f9404590d034 is no longer used by this node. Pending deletion! [ 3090.480858] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/54e58a8f-a716-490a-9811-f9404590d034" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3090.481092] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3090.481214] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3090.481519] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3090.481765] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b468ab47-805a-48cf-956a-2300ef02a0e2 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3090.485860] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3090.485860] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a331d2-2cca-2540-cf68-e7be7baaacfd" [ 3090.485860] env[61663]: _type = "Task" [ 3090.485860] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3090.493088] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a331d2-2cca-2540-cf68-e7be7baaacfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3090.996159] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a331d2-2cca-2540-cf68-e7be7baaacfd, 'name': SearchDatastore_Task, 'duration_secs': 0.007894} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3090.996387] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36/ts-2024-12-01-04-40-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3090.996641] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d4aacff-74a1-4381-b904-bc74a0940c0b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.008636] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36/ts-2024-12-01-04-40-16 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3091.008771] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 0e624580-63c4-4d24-b13b-c9defad3fc36 is no longer used by this node. Pending deletion! 
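The repeating SearchDatastore_Task / MakeDirectory pattern above is nova's VMware image-cache aging pass: on the first pass over an unused cache entry, the manager drops a `ts-<timestamp>` marker directory next to it ("Pending deletion!"); on a later pass, entries whose oldest marker has aged past the configured window are actually removed ("no longer used. Deleting!"). A minimal sketch of that scheme against a local filesystem — the helper names here are illustrative, the real logic lives in nova/virt/vmwareapi/imagecache.py:

```python
import os
from datetime import datetime, timedelta

TS_PREFIX = "ts-"
TS_FORMAT = "%Y-%m-%d-%H-%M-%S"   # matches ts-2024-12-01-04-40-15 above

def mark_unused(cache_dir, image_id, now):
    """First aging pass: drop a ts- marker into the unused image's folder."""
    marker = TS_PREFIX + now.strftime(TS_FORMAT)
    os.makedirs(os.path.join(cache_dir, image_id, marker), exist_ok=True)

def ripe_for_deletion(cache_dir, image_id, now, max_age=timedelta(days=1)):
    """Later passes: delete once the oldest marker is older than max_age."""
    image_dir = os.path.join(cache_dir, image_id)
    markers = [d for d in os.listdir(image_dir) if d.startswith(TS_PREFIX)]
    if not markers:
        return False
    oldest = min(datetime.strptime(m[len(TS_PREFIX):], TS_FORMAT)
                 for m in markers)
    return now - oldest > max_age
```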
[ 3091.008939] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/0e624580-63c4-4d24-b13b-c9defad3fc36" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3091.009165] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4758829c-941e-44ae-8ddb-66982ff66ae2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3091.009310] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4758829c-941e-44ae-8ddb-66982ff66ae2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3091.009623] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4758829c-941e-44ae-8ddb-66982ff66ae2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3091.009859] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25cf4af1-f4a0-4f0c-baeb-22415ba2292a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.013956] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3091.013956] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52198558-a83e-f839-0191-29820be8e534" [ 3091.013956] env[61663]: _type = "Task" [ 3091.013956] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3091.021179] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52198558-a83e-f839-0191-29820be8e534, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3091.525156] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52198558-a83e-f839-0191-29820be8e534, 'name': SearchDatastore_Task, 'duration_secs': 0.0093} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3091.525457] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/4758829c-941e-44ae-8ddb-66982ff66ae2 is no longer used. Deleting! 
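Once an entry is declared "no longer used. Deleting!", the records that follow show ds_util.file_delete issuing FileManager.DeleteDatastoreFile_Task and waiting on it (task-1690962 here). A sketch of that call over the public oslo.vmware session surface; the helper itself and its argument plumbing are simplified:

```python
def delete_cached_image(session, dc_ref, ds_path):
    """Delete one cached image directory from the datastore.

    `session` is an oslo_vmware.api.VMwareAPISession; `ds_path` is a
    datastore path such as
    "[datastore2] devstack-image-cache_base/4758829c-...".
    """
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                              file_manager, name=str(ds_path),
                              datacenter=dc_ref)
    session.wait_for_task(task)  # polls until the delete task finishes
```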
[ 3091.525586] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/4758829c-941e-44ae-8ddb-66982ff66ae2 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3091.525853] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d27ecb99-1b1a-4563-b36d-008355ec3b52 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.532076] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3091.532076] env[61663]: value = "task-1690962" [ 3091.532076] env[61663]: _type = "Task" [ 3091.532076] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3091.540058] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3092.042610] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108154} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3092.042887] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3092.043063] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4758829c-941e-44ae-8ddb-66982ff66ae2" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3092.043253] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4c8f79f3-5a26-400f-b809-b9ebbad1e0b5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3092.043351] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4c8f79f3-5a26-400f-b809-b9ebbad1e0b5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3092.043687] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4c8f79f3-5a26-400f-b809-b9ebbad1e0b5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3092.043948] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9b379dc-7f5d-49af-b9db-0bea0ddf324b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.048109] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3092.048109] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b104cd-1082-1af6-4b36-84c9c06ee366" [ 3092.048109] env[61663]: _type = "Task" [ 3092.048109] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3092.056458] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b104cd-1082-1af6-4b36-84c9c06ee366, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3092.558161] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b104cd-1082-1af6-4b36-84c9c06ee366, 'name': SearchDatastore_Task, 'duration_secs': 0.00928} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3092.558450] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4c8f79f3-5a26-400f-b809-b9ebbad1e0b5/ts-2024-12-01-04-40-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3092.558678] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a552508-629f-4048-b401-df3689739e85 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.571049] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4c8f79f3-5a26-400f-b809-b9ebbad1e0b5/ts-2024-12-01-04-40-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3092.571199] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4c8f79f3-5a26-400f-b809-b9ebbad1e0b5 is no longer used by this node. Pending deletion! 
[ 3092.571363] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4c8f79f3-5a26-400f-b809-b9ebbad1e0b5" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3092.571577] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/bbd9f2aa-86e6-4f1d-aeff-30df232bd953" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3092.571695] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/bbd9f2aa-86e6-4f1d-aeff-30df232bd953" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3092.572000] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/bbd9f2aa-86e6-4f1d-aeff-30df232bd953" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3092.572247] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75fe1933-1968-45f5-9e02-92a3cd2fee2b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.576119] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3092.576119] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52588516-b674-2f04-22b6-2c084cd23476" [ 3092.576119] env[61663]: _type = "Task" [ 3092.576119] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3092.583437] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52588516-b674-2f04-22b6-2c084cd23476, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3093.086944] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52588516-b674-2f04-22b6-2c084cd23476, 'name': SearchDatastore_Task, 'duration_secs': 0.007974} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3093.087261] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/bbd9f2aa-86e6-4f1d-aeff-30df232bd953/ts-2024-12-01-04-40-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3093.087531] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf0ea10e-042e-45bd-9a82-dafe14180e87 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3093.098732] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/bbd9f2aa-86e6-4f1d-aeff-30df232bd953/ts-2024-12-01-04-40-18 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3093.098899] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image bbd9f2aa-86e6-4f1d-aeff-30df232bd953 is no longer used by this node. Pending deletion! [ 3093.099098] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/bbd9f2aa-86e6-4f1d-aeff-30df232bd953" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3093.099358] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8758ea8c-5ac9-44a7-905e-7a220464c4cc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3093.099488] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8758ea8c-5ac9-44a7-905e-7a220464c4cc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3093.099845] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8758ea8c-5ac9-44a7-905e-7a220464c4cc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3093.100089] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99e3eb36-59ef-466a-b5b1-847246a7d651 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3093.104183] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3093.104183] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528186e0-b648-aec3-702e-972569011fdc" [ 3093.104183] env[61663]: _type = "Task" [ 3093.104183] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3093.111814] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528186e0-b648-aec3-702e-972569011fdc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3093.614554] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528186e0-b648-aec3-702e-972569011fdc, 'name': SearchDatastore_Task, 'duration_secs': 0.007952} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3093.614896] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8758ea8c-5ac9-44a7-905e-7a220464c4cc/ts-2024-12-01-04-40-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3093.615094] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d36e16e-f8e7-4455-9d32-38cf11b69c5f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3093.627353] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8758ea8c-5ac9-44a7-905e-7a220464c4cc/ts-2024-12-01-04-40-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3093.627507] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8758ea8c-5ac9-44a7-905e-7a220464c4cc is no longer used by this node. Pending deletion! [ 3093.627667] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8758ea8c-5ac9-44a7-905e-7a220464c4cc" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3093.627878] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/1a0d6f64-128c-41ba-b02c-c02838b95c34" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3093.627997] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/1a0d6f64-128c-41ba-b02c-c02838b95c34" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3093.628315] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/1a0d6f64-128c-41ba-b02c-c02838b95c34" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3093.628555] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-026a3b9b-4843-4a89-80ff-8be40efa08a6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3093.632666] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3093.632666] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e61ac3-8906-7960-a659-ebec7274da7d" [ 3093.632666] env[61663]: _type = "Task" [ 3093.632666] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3093.640549] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e61ac3-8906-7960-a659-ebec7274da7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3094.142508] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52e61ac3-8906-7960-a659-ebec7274da7d, 'name': SearchDatastore_Task, 'duration_secs': 0.007836} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3094.142766] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/1a0d6f64-128c-41ba-b02c-c02838b95c34/ts-2024-12-01-04-40-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3094.143027] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a214a0ac-7b0a-4d24-a24f-713cfc41da11 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3094.154988] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/1a0d6f64-128c-41ba-b02c-c02838b95c34/ts-2024-12-01-04-40-19 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3094.155152] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 1a0d6f64-128c-41ba-b02c-c02838b95c34 is no longer used by this node. Pending deletion! 
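Every `Waiting for the task ... progress is 0% ... completed successfully` sequence is oslo.vmware's wait_for_task driving a looping call that re-polls the task state roughly twice a second, which is why the poll records above sit ~0.5 s apart. Stripped of the eventlet machinery, the loop amounts to this hypothetical sketch:

```python
import time

def wait_for_task(poll, interval=0.5):
    """poll() returns a dict like {'state': ..., 'result': ..., 'error': ...}."""
    while True:
        info = poll()                    # one PropertyCollector round-trip
        if info["state"] == "success":   # logged: "completed successfully"
            return info.get("result")
        if info["state"] == "error":     # re-raised as a VimFaultException
            raise RuntimeError(info["error"])
        time.sleep(interval)             # the "progress is 0%" records
```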
[ 3094.155319] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/1a0d6f64-128c-41ba-b02c-c02838b95c34" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3094.155544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/88dab96f-8b7c-406f-b256-d5f7d5c0b437" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3094.155667] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/88dab96f-8b7c-406f-b256-d5f7d5c0b437" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3094.155978] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/88dab96f-8b7c-406f-b256-d5f7d5c0b437" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3094.156218] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4129a95-3cb8-4b02-aafd-59cc113d36c9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3094.160498] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3094.160498] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a3836d-3e55-45fd-9a09-62ef717068cc" [ 3094.160498] env[61663]: _type = "Task" [ 3094.160498] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3094.167658] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a3836d-3e55-45fd-9a09-62ef717068cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3094.671175] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a3836d-3e55-45fd-9a09-62ef717068cc, 'name': SearchDatastore_Task, 'duration_secs': 0.008137} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3094.671490] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/88dab96f-8b7c-406f-b256-d5f7d5c0b437 is no longer used. Deleting! 
[ 3094.671626] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/88dab96f-8b7c-406f-b256-d5f7d5c0b437 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3094.671887] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3cbb0964-41a0-46ee-8107-30394bc03d7e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3094.678354] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3094.678354] env[61663]: value = "task-1690963" [ 3094.678354] env[61663]: _type = "Task" [ 3094.678354] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3094.686183] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690963, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3095.188915] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10724} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3095.189148] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3095.189322] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/88dab96f-8b7c-406f-b256-d5f7d5c0b437" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3095.189574] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7c96100c-341f-4b84-8426-db14a5e89e57" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3095.189696] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7c96100c-341f-4b84-8426-db14a5e89e57" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3095.190029] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7c96100c-341f-4b84-8426-db14a5e89e57" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3095.190293] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab031328-b4f4-4266-9ee7-065d22e64f8d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3095.194476] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3095.194476] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522682d5-a47e-cbc2-09fd-0e668baccf0f" [ 3095.194476] env[61663]: _type = "Task" [ 3095.194476] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3095.201849] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522682d5-a47e-cbc2-09fd-0e668baccf0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3095.704464] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522682d5-a47e-cbc2-09fd-0e668baccf0f, 'name': SearchDatastore_Task, 'duration_secs': 0.009243} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3095.704749] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/7c96100c-341f-4b84-8426-db14a5e89e57/ts-2024-12-01-04-40-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3095.704991] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-427fc5e8-8cd3-4167-9522-294d50985564 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3095.716563] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/7c96100c-341f-4b84-8426-db14a5e89e57/ts-2024-12-01-04-40-21 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3095.716704] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 7c96100c-341f-4b84-8426-db14a5e89e57 is no longer used by this node. Pending deletion! 
[ 3095.716866] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7c96100c-341f-4b84-8426-db14a5e89e57" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3095.717103] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/b9c6cf3f-76e5-47dc-a9b4-9e188c06129f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3095.717223] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/b9c6cf3f-76e5-47dc-a9b4-9e188c06129f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3095.717537] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b9c6cf3f-76e5-47dc-a9b4-9e188c06129f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3095.717822] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e25a3d96-39d6-442f-91f7-c603442d5b8f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3095.722810] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3095.722810] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527ebdba-8eb0-25c4-a8c3-461e197eb769" [ 3095.722810] env[61663]: _type = "Task" [ 3095.722810] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3095.729876] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527ebdba-8eb0-25c4-a8c3-461e197eb769, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3096.236598] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]527ebdba-8eb0-25c4-a8c3-461e197eb769, 'name': SearchDatastore_Task, 'duration_secs': 0.007717} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3096.236598] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/b9c6cf3f-76e5-47dc-a9b4-9e188c06129f/ts-2024-12-01-04-40-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3096.236598] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-288f9e63-ca34-49c3-932b-8273557687b1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.248385] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/b9c6cf3f-76e5-47dc-a9b4-9e188c06129f/ts-2024-12-01-04-40-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3096.248385] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image b9c6cf3f-76e5-47dc-a9b4-9e188c06129f is no longer used by this node. Pending deletion! [ 3096.248385] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/b9c6cf3f-76e5-47dc-a9b4-9e188c06129f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3096.248704] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2efd3144-bb5b-4a23-90cf-be57ecf69aa4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3096.248704] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2efd3144-bb5b-4a23-90cf-be57ecf69aa4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3096.248826] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2efd3144-bb5b-4a23-90cf-be57ecf69aa4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3096.249109] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b12f1729-110a-4d8c-a4f7-c465cef8a504 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.253590] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3096.253590] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526e81ee-0a17-74ca-4269-f79593edfd79" [ 3096.253590] env[61663]: _type = "Task" [ 3096.253590] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3096.261547] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526e81ee-0a17-74ca-4269-f79593edfd79, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3096.765478] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]526e81ee-0a17-74ca-4269-f79593edfd79, 'name': SearchDatastore_Task, 'duration_secs': 0.007863} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3096.765799] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/2efd3144-bb5b-4a23-90cf-be57ecf69aa4/ts-2024-12-01-04-40-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3096.765984] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba760f6c-13d0-4b40-843d-1f3edad004d8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.777155] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/2efd3144-bb5b-4a23-90cf-be57ecf69aa4/ts-2024-12-01-04-40-22 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3096.777579] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 2efd3144-bb5b-4a23-90cf-be57ecf69aa4 is no longer used by this node. Pending deletion! [ 3096.777579] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2efd3144-bb5b-4a23-90cf-be57ecf69aa4" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3096.777707] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3096.777806] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3096.778139] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3096.778379] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c02cf04-2ae2-40f9-9e66-e3592251565e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3096.782507] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3096.782507] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f36baf-128a-a9eb-51e9-87b5773ad2ba" [ 3096.782507] env[61663]: _type = "Task" [ 3096.782507] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3096.789533] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f36baf-128a-a9eb-51e9-87b5773ad2ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3097.293995] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f36baf-128a-a9eb-51e9-87b5773ad2ba, 'name': SearchDatastore_Task, 'duration_secs': 0.008052} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3097.293995] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac is no longer used. Deleting! [ 3097.294211] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3097.294323] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb9a4b3f-a012-46a9-b3a5-3971bc811cae {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3097.300755] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3097.300755] env[61663]: value = "task-1690964" [ 3097.300755] env[61663]: _type = "Task" [ 3097.300755] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3097.308731] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690964, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3097.810713] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690964, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112675} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3097.812217] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3097.812217] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3097.812217] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3a3f1aa6-6a15-439a-a267-c99249d38c7b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3097.812217] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3a3f1aa6-6a15-439a-a267-c99249d38c7b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3097.812217] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3a3f1aa6-6a15-439a-a267-c99249d38c7b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3097.812442] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48d0dd46-b5b7-4cca-ac52-b2ab7dddaea3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3097.816637] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3097.816637] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528d83ef-0906-0b94-691f-96ef08a1053d" [ 3097.816637] env[61663]: _type = "Task" [ 3097.816637] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3097.823855] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528d83ef-0906-0b94-691f-96ef08a1053d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3098.327237] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528d83ef-0906-0b94-691f-96ef08a1053d, 'name': SearchDatastore_Task, 'duration_secs': 0.008293} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3098.327575] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/3a3f1aa6-6a15-439a-a267-c99249d38c7b/ts-2024-12-01-04-40-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3098.327850] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-509e2706-96b9-41d1-88a4-ab1c7664d276 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3098.338747] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/3a3f1aa6-6a15-439a-a267-c99249d38c7b/ts-2024-12-01-04-40-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3098.338888] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 3a3f1aa6-6a15-439a-a267-c99249d38c7b is no longer used by this node. Pending deletion! [ 3098.339062] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/3a3f1aa6-6a15-439a-a267-c99249d38c7b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3098.339276] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/aae5069f-3967-411a-935c-95fdec3500b0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3098.339451] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/aae5069f-3967-411a-935c-95fdec3500b0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3098.339752] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/aae5069f-3967-411a-935c-95fdec3500b0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3098.339974] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34de043d-3fd2-4665-b56c-13ae103fe076 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3098.343811] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3098.343811] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a42768-9db4-e7e1-d7aa-90eead64bfd3" [ 3098.343811] env[61663]: _type = "Task" [ 3098.343811] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3098.351054] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a42768-9db4-e7e1-d7aa-90eead64bfd3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3098.755616] env[61663]: WARNING oslo_vmware.rw_handles [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles response.begin() [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3098.755616] env[61663]: ERROR oslo_vmware.rw_handles [ 3098.756085] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Downloaded image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to vmware_temp/5cacf96e-97c6-417c-be98-99df0fc06cde/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3098.758481] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Caching image {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3098.758792] env[61663]: DEBUG nova.virt.vmwareapi.vm_util [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Copying Virtual Disk [datastore1] vmware_temp/5cacf96e-97c6-417c-be98-99df0fc06cde/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk to [datastore1] vmware_temp/5cacf96e-97c6-417c-be98-99df0fc06cde/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk {{(pid=61663) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3098.759142] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82da26d3-812c-46c9-8015-cdfc70df4718 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3098.767672] env[61663]: DEBUG oslo_vmware.api [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 
tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 3098.767672] env[61663]: value = "task-1690965" [ 3098.767672] env[61663]: _type = "Task" [ 3098.767672] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3098.775666] env[61663]: DEBUG oslo_vmware.api [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690965, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3098.853417] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a42768-9db4-e7e1-d7aa-90eead64bfd3, 'name': SearchDatastore_Task, 'duration_secs': 0.007721} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3098.853764] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/aae5069f-3967-411a-935c-95fdec3500b0/ts-2024-12-01-04-40-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3098.853930] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e56ca8e0-1371-4b09-9126-86f9fa9fb7b0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3098.866447] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/aae5069f-3967-411a-935c-95fdec3500b0/ts-2024-12-01-04-40-24 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3098.866575] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image aae5069f-3967-411a-935c-95fdec3500b0 is no longer used by this node. Pending deletion! 
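Interleaved with the aging pass, a tempest-driven spawn is caching image 362c8152: the WARNING above is rw_handles failing to read the final HTTP response after the image stream had already been written (the next record confirms the download to tmp-sparse.vmdk succeeded), and the CopyVirtualDisk_Task it then starts (task-1690965) is what converts that sparse upload into the cached vmdk. A sketch of the copy step over the oslo.vmware session surface; the disk-spec handling is simplified:

```python
def cache_sparse_image(session, dc_ref, tmp_sparse_path, cache_vmdk_path):
    """Convert the downloaded tmp-sparse.vmdk into the cached disk."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, "CopyVirtualDisk_Task", disk_mgr,
                              sourceName=str(tmp_sparse_path),
                              sourceDatacenter=dc_ref,
                              destName=str(cache_vmdk_path),
                              destDatacenter=dc_ref)
    # If vCenter rejects the disk spec, the task error surfaces here as
    # VimFaultException("A specified parameter was not correct: fileType",
    # Faults: ['InvalidArgument']) -- exactly the spawn failure below.
    session.wait_for_task(task)
```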
[ 3098.866739] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/aae5069f-3967-411a-935c-95fdec3500b0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3098.866951] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3098.867080] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3098.867436] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3098.867644] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31b64ca6-57cc-42a1-a3cb-7a1d0bbdb449 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3098.871528] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3098.871528] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520356b2-a807-b834-ebe9-3cc968c66671" [ 3098.871528] env[61663]: _type = "Task" [ 3098.871528] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3098.879147] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520356b2-a807-b834-ebe9-3cc968c66671, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3099.277765] env[61663]: DEBUG oslo_vmware.exceptions [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Fault InvalidArgument not matched. 
{{(pid=61663) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3099.278053] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Releasing lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3099.278602] env[61663]: ERROR nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3099.278602] env[61663]: Faults: ['InvalidArgument'] [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Traceback (most recent call last): [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] yield resources [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] self.driver.spawn(context, instance, image_meta, [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] self._fetch_image_if_missing(context, vi) [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] image_cache(vi, tmp_image_ds_loc) [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] vm_util.copy_virtual_disk( [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] session._wait_for_task(vmdk_copy_task) [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] return self.wait_for_task(task_ref) [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] return evt.wait() [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] result = hub.switch() [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] return self.greenlet.switch() [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] self.f(*self.args, **self.kw) [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] raise exceptions.translate_fault(task_info.error) [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Faults: ['InvalidArgument'] [ 3099.278602] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] [ 3099.279595] env[61663]: INFO nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Terminating instance [ 3099.280576] env[61663]: DEBUG oslo_concurrency.lockutils [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Acquired lock "[datastore1] devstack-image-cache_base/362c8152-fcd0-4f43-acbf-09a2dc376cb2/362c8152-fcd0-4f43-acbf-09a2dc376cb2.vmdk" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3099.280798] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3099.281049] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03c87e3f-829a-43b2-ba6c-0a76e6d8c682 {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.283340] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Start destroying the instance on the hypervisor. {{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 3099.283536] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3099.284251] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77379f2-8225-469c-8540-22e3263b8fd4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.290846] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Unregistering the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3099.291088] env[61663]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0f9328f-54cd-4ab3-8a1d-566e5df0ef9d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.293077] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3099.293255] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61663) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3099.294211] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6ee0a63-10be-47dd-b153-36f4e38b663a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.299324] env[61663]: DEBUG oslo_vmware.api [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Waiting for the task: (returnval){ [ 3099.299324] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5273758a-b810-61cf-f4e4-8413cbaee0f6" [ 3099.299324] env[61663]: _type = "Task" [ 3099.299324] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3099.308117] env[61663]: DEBUG oslo_vmware.api [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5273758a-b810-61cf-f4e4-8413cbaee0f6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3099.361220] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Unregistered the VM {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3099.361436] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Deleting contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3099.361619] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleting the datastore file [datastore1] 0266b3f5-ee31-46d7-af5e-844a27bfd829 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3099.361886] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7453b3c3-0265-4a88-b8d9-cc4fe880ef9b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.368026] env[61663]: DEBUG oslo_vmware.api [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for the task: (returnval){ [ 3099.368026] env[61663]: value = "task-1690967" [ 3099.368026] env[61663]: _type = "Task" [ 3099.368026] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3099.378244] env[61663]: DEBUG oslo_vmware.api [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690967, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3099.383222] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]520356b2-a807-b834-ebe9-3cc968c66671, 'name': SearchDatastore_Task, 'duration_secs': 0.007669} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3099.383466] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99/ts-2024-12-01-04-40-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3099.383697] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4cc3cfb-5c8a-4c43-a70c-973c410d256f {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.401866] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99/ts-2024-12-01-04-40-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3099.402033] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 15a4568f-ffa6-43d3-b448-2446f50d6d99 is no longer used by this node. Pending deletion! [ 3099.402206] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/15a4568f-ffa6-43d3-b448-2446f50d6d99" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3099.402427] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e373a635-5113-439d-9e32-9d5edf19972e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3099.402544] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e373a635-5113-439d-9e32-9d5edf19972e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3099.402854] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e373a635-5113-439d-9e32-9d5edf19972e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3099.403105] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62b25a82-f420-4639-8831-b3b1529185ec {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.406969] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3099.406969] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a953c1-e1d0-ed7f-8629-1ff306a90489" [ 3099.406969] env[61663]: _type = "Task" [ 3099.406969] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3099.413967] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a953c1-e1d0-ed7f-8629-1ff306a90489, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3099.810019] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Preparing fetch location {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3099.810287] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Creating directory with path [datastore1] vmware_temp/4979c4ec-e922-4203-a742-1bb8f74a70e6/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3099.810553] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37aaf85c-445e-495a-9602-9e157cbec53a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.821510] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Created directory with path [datastore1] vmware_temp/4979c4ec-e922-4203-a742-1bb8f74a70e6/362c8152-fcd0-4f43-acbf-09a2dc376cb2 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3099.821655] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Fetch image to [datastore1] vmware_temp/4979c4ec-e922-4203-a742-1bb8f74a70e6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk {{(pid=61663) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3099.821874] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to [datastore1] vmware_temp/4979c4ec-e922-4203-a742-1bb8f74a70e6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk on the data store datastore1 {{(pid=61663) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3099.822609] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996a7aca-8191-4959-b44c-70dd7a045a49 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.828954] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b505a4f1-5228-4ee1-80f5-58ac324fdc04 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.837766] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01db269-d641-41b9-b938-bb3b63b9ccc3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.868750] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d49df51-0bfc-40cf-89a7-0ba8da8878fd {{(pid=61663) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.879716] env[61663]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1ea1046b-c3c8-40ec-a153-a244c1513cd1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.881401] env[61663]: DEBUG oslo_vmware.api [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Task: {'id': task-1690967, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073486} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3099.881640] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3099.881828] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Deleted contents of the VM from datastore datastore1 {{(pid=61663) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3099.881994] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3099.882190] env[61663]: INFO nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Took 0.60 seconds to destroy the instance on the hypervisor. 
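The "Fault InvalidArgument not matched" entry at the start of this burst shows oslo.vmware's get_fault_class consulting its registry of specific fault classes and, finding no entry for 'InvalidArgument', falling back to the generic VimFaultException that the spawn traceback then re-raises. A self-contained sketch of that lookup pattern; the class, registry, and function names below are illustrative, not oslo.vmware's API:

```python
# Fallback fault translation: specific fault classes first, generic otherwise.
class GenericVimFault(Exception):
    def __init__(self, msg: str, fault_list: list[str]):
        super().__init__(msg)
        self.fault_list = fault_list

class FileFault(GenericVimFault):
    pass

_FAULT_REGISTRY = {"FileFault": FileFault}  # 'InvalidArgument' is absent

def translate_fault(fault_name: str, msg: str) -> GenericVimFault:
    # Unmatched fault names fall through to the generic class,
    # mirroring the "Fault ... not matched" debug line above.
    cls = _FAULT_REGISTRY.get(fault_name, GenericVimFault)
    return cls(msg, [fault_name])

err = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
assert type(err) is GenericVimFault and err.fault_list == ["InvalidArgument"]
```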
[ 3099.884279] env[61663]: DEBUG nova.compute.claims [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Aborting claim: {{(pid=61663) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3099.884449] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3099.884662] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3099.906533] env[61663]: DEBUG nova.virt.vmwareapi.images [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] [instance: 911c036c-c7d8-4ff7-b874-335361fb5281] Downloading image file data 362c8152-fcd0-4f43-acbf-09a2dc376cb2 to the data store datastore1 {{(pid=61663) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3099.919577] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52a953c1-e1d0-ed7f-8629-1ff306a90489, 'name': SearchDatastore_Task, 'duration_secs': 0.007483} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3099.919723] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e373a635-5113-439d-9e32-9d5edf19972e/ts-2024-12-01-04-40-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3099.921028] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61b1f1c9-4fba-4715-a776-f4ceb6d45e7d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.931584] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e373a635-5113-439d-9e32-9d5edf19972e/ts-2024-12-01-04-40-25 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3099.931736] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e373a635-5113-439d-9e32-9d5edf19972e is no longer used by this node. Pending deletion! 
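The "compute_resources" acquire/release pairs above come from oslo.concurrency's lockutils, which the resource tracker uses to serialize claim updates. A minimal, runnable use of the same library; the function body and behavior here are only for illustration:

```python
# oslo.concurrency lock pattern, as used around abort_instance_claim above.
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def abort_instance_claim():
    # Resource-tracker style critical section: only one thread at a time
    # may mutate the tracked inventory/usage for this host.
    print("claim aborted under lock")

abort_instance_claim()
```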
[ 3099.931883] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e373a635-5113-439d-9e32-9d5edf19972e" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3099.932112] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/2c06492e-50aa-4a2f-bd98-e9ed0f6aa1d8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3099.932233] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/2c06492e-50aa-4a2f-bd98-e9ed0f6aa1d8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3099.932554] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/2c06492e-50aa-4a2f-bd98-e9ed0f6aa1d8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3099.932808] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cff8c3d-7e53-40d9-b71f-3efa0b2cc57c {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3099.937020] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3099.937020] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528d7fd0-60d4-5071-09c5-5179c3dd889d" [ 3099.937020] env[61663]: _type = "Task" [ 3099.937020] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3099.944316] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528d7fd0-60d4-5071-09c5-5179c3dd889d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3099.978711] env[61663]: DEBUG oslo_vmware.rw_handles [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4979c4ec-e922-4203-a742-1bb8f74a70e6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3100.040609] env[61663]: DEBUG oslo_vmware.rw_handles [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Completed reading data from the image iterator. 
{{(pid=61663) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3100.040804] env[61663]: DEBUG oslo_vmware.rw_handles [None req-52f86f7e-0052-4605-a5b8-3fca0baeffff tempest-ServersTestJSON-1545653383 tempest-ServersTestJSON-1545653383-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4979c4ec-e922-4203-a742-1bb8f74a70e6/362c8152-fcd0-4f43-acbf-09a2dc376cb2/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61663) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3100.139986] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9baf2a98-7363-4e43-959d-0608cd9c01c9 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3100.147167] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2391b078-2fc5-4cd7-be37-fd16ccf97192 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3100.176202] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60dfc5b-c297-44ac-932f-c86cb720a8f6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3100.183644] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb991372-6a20-4d1f-8717-1fafb5b441ee {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3100.197827] env[61663]: DEBUG nova.compute.provider_tree [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed in ProviderTree for provider: b47d006d-a9bd-461e-a5d9-39811f005278 {{(pid=61663) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3100.209564] env[61663]: DEBUG nova.scheduler.client.report [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Inventory has not changed for provider b47d006d-a9bd-461e-a5d9-39811f005278 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 183, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61663) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3100.232722] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.348s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3100.233281] env[61663]: ERROR nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Failed 
to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3100.233281] env[61663]: Faults: ['InvalidArgument'] [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Traceback (most recent call last): [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] self.driver.spawn(context, instance, image_meta, [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] self._fetch_image_if_missing(context, vi) [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] image_cache(vi, tmp_image_ds_loc) [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] vm_util.copy_virtual_disk( [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] session._wait_for_task(vmdk_copy_task) [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] return self.wait_for_task(task_ref) [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] return evt.wait() [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] result = hub.switch() [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 
0266b3f5-ee31-46d7-af5e-844a27bfd829] return self.greenlet.switch() [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] self.f(*self.args, **self.kw) [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] raise exceptions.translate_fault(task_info.error) [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Faults: ['InvalidArgument'] [ 3100.233281] env[61663]: ERROR nova.compute.manager [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] [ 3100.234358] env[61663]: DEBUG nova.compute.utils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] VimFaultException {{(pid=61663) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 3100.235475] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Build of instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 was re-scheduled: A specified parameter was not correct: fileType [ 3100.235475] env[61663]: Faults: ['InvalidArgument'] {{(pid=61663) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 3100.235868] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Unplugging VIFs for instance {{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 3100.236055] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.

{{(pid=61663) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 3100.236232] env[61663]: DEBUG nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 3100.236396] env[61663]: DEBUG nova.network.neutron [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3100.447133] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528d7fd0-60d4-5071-09c5-5179c3dd889d, 'name': SearchDatastore_Task, 'duration_secs': 0.009397} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3100.447419] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/2c06492e-50aa-4a2f-bd98-e9ed0f6aa1d8/ts-2024-12-01-04-40-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3100.447666] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1596eb87-52fd-4011-b7da-c2f9dbb4544e {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3100.461873] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/2c06492e-50aa-4a2f-bd98-e9ed0f6aa1d8/ts-2024-12-01-04-40-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3100.462043] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 2c06492e-50aa-4a2f-bd98-e9ed0f6aa1d8 is no longer used by this node. Pending deletion! 
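The rw_handles entries earlier in this burst stream the 21318656-byte image to the datastore through the vSphere "/folder" HTTPS endpoint, addressed by the dcPath and dsName query parameters visible in the logged URL. A rough equivalent using plain requests; cookie-based authentication here is an assumption for brevity, since oslo.vmware actually obtains a ticket via SessionManager.AcquireGenericServiceTicket, as logged above:

```python
# Sketch of a datastore file upload over the vSphere /folder HTTP API.
import requests

def upload_to_datastore(host, ds_path, ds_name, dc_path, data, cookie):
    url = f"https://{host}/folder/{ds_path}"
    resp = requests.put(
        url,
        params={"dcPath": dc_path, "dsName": ds_name},  # e.g. "ha-datacenter", "datastore1"
        data=data,                   # bytes, file-like object, or iterator
        headers={"Cookie": cookie},  # assumed vCenter/ESX session cookie
        verify=True,
        timeout=300,
    )
    resp.raise_for_status()
```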
[ 3100.463550] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/2c06492e-50aa-4a2f-bd98-e9ed0f6aa1d8" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3100.463550] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/515742b1-502a-4bbc-9185-3aa46c87d313" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3100.463550] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/515742b1-502a-4bbc-9185-3aa46c87d313" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3100.463550] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/515742b1-502a-4bbc-9185-3aa46c87d313" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3100.463550] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24da660b-ea8b-491c-9e3a-173ebbde3494 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3100.467280] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3100.467280] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c0b19f-f4a6-ab4f-4e6b-ec789bec1cb3" [ 3100.467280] env[61663]: _type = "Task" [ 3100.467280] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3100.475180] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c0b19f-f4a6-ab4f-4e6b-ec789bec1cb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3100.699810] env[61663]: DEBUG nova.network.neutron [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3100.715761] env[61663]: INFO nova.compute.manager [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Took 0.48 seconds to deallocate network for instance. 
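The "Inventory has not changed" report above carries the provider's full inventory. Placement-style usable capacity per resource class is derived as (total - reserved) * allocation_ratio, which the following worked check applies to the exact figures logged:

```python
# Capacity check against the inventory data reported for provider b47d006d-....
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    capacity = int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
    print(rc, capacity)   # VCPU 192, MEMORY_MB 196078, DISK_GB 400
```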
[ 3100.846850] env[61663]: INFO nova.scheduler.client.report [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Deleted allocations for instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 [ 3100.886628] env[61663]: DEBUG oslo_concurrency.lockutils [None req-af9a89e0-a326-4d74-a258-8bc30c679e9e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "0266b3f5-ee31-46d7-af5e-844a27bfd829" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 664.044s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3100.886957] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "0266b3f5-ee31-46d7-af5e-844a27bfd829" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 467.741s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3100.887177] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Acquiring lock "0266b3f5-ee31-46d7-af5e-844a27bfd829-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3100.887391] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "0266b3f5-ee31-46d7-af5e-844a27bfd829-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3100.887564] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "0266b3f5-ee31-46d7-af5e-844a27bfd829-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3100.889615] env[61663]: INFO nova.compute.manager [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Terminating instance [ 3100.891751] env[61663]: DEBUG nova.compute.manager [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Start destroying the instance on the hypervisor. 
{{(pid=61663) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 3100.892078] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Destroying instance {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3100.892702] env[61663]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4e129ec8-b162-4695-8419-f62dcbfcba78 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3100.901920] env[61663]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76783cd4-709c-4122-93e2-2b7222108a5b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3100.929297] env[61663]: WARNING nova.virt.vmwareapi.vmops [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0266b3f5-ee31-46d7-af5e-844a27bfd829 could not be found. [ 3100.929535] env[61663]: DEBUG nova.virt.vmwareapi.vmops [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Instance destroyed {{(pid=61663) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3100.929731] env[61663]: INFO nova.compute.manager [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Took 0.04 seconds to destroy the instance on the hypervisor. [ 3100.929974] env[61663]: DEBUG oslo.service.loopingcall [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61663) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3100.930259] env[61663]: DEBUG nova.compute.manager [-] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Deallocating network for instance {{(pid=61663) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 3100.930373] env[61663]: DEBUG nova.network.neutron [-] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] deallocate_for_instance() {{(pid=61663) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3100.955790] env[61663]: DEBUG nova.network.neutron [-] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Updating instance_info_cache with network_info: [] {{(pid=61663) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3100.963888] env[61663]: INFO nova.compute.manager [-] [instance: 0266b3f5-ee31-46d7-af5e-844a27bfd829] Took 0.03 seconds to deallocate network for instance. 
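The "Waiting for function ... _deallocate_network_with_retries to return" entry above is an oslo.service looping call: the worker runs on a fixed interval until it signals completion. A self-contained example of the same primitive; the interval and the pretend three-attempt worker are illustrative:

```python
# oslo.service FixedIntervalLoopingCall, the retry mechanism logged above.
from oslo_service import loopingcall

attempts = {"n": 0}

def _try_deallocate():
    attempts["n"] += 1
    if attempts["n"] >= 3:                       # pretend the third try succeeds
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
result = timer.start(interval=0.1).wait()        # blocks until LoopingCallDone
print(result)                                    # True
```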
[ 3100.979610] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52c0b19f-f4a6-ab4f-4e6b-ec789bec1cb3, 'name': SearchDatastore_Task, 'duration_secs': 0.008168} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3100.979952] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/515742b1-502a-4bbc-9185-3aa46c87d313/ts-2024-12-01-04-40-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3100.980289] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1298ec8-cb0a-48fb-b615-746504be4b11 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3100.993490] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/515742b1-502a-4bbc-9185-3aa46c87d313/ts-2024-12-01-04-40-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3100.993684] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 515742b1-502a-4bbc-9185-3aa46c87d313 is no longer used by this node. Pending deletion! [ 3100.993906] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/515742b1-502a-4bbc-9185-3aa46c87d313" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3100.994221] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/af6b2d10-0494-4330-8b86-7ad24bc25862" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3100.994394] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/af6b2d10-0494-4330-8b86-7ad24bc25862" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3100.994815] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/af6b2d10-0494-4330-8b86-7ad24bc25862" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3100.995163] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e1893fb-e5e9-40cc-9c9a-78871b78e561 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3101.004375] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3101.004375] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5227539e-7968-1da0-ba7c-ac96937a9ab7" [ 3101.004375] env[61663]: _type = "Task" [ 3101.004375] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3101.018314] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5227539e-7968-1da0-ba7c-ac96937a9ab7, 'name': SearchDatastore_Task, 'duration_secs': 0.008141} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3101.021328] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/af6b2d10-0494-4330-8b86-7ad24bc25862/ts-2024-12-01-04-40-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3101.021731] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9503175-8b85-43cd-83ef-9002fb359b8d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3101.033683] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/af6b2d10-0494-4330-8b86-7ad24bc25862/ts-2024-12-01-04-40-26 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3101.033829] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image af6b2d10-0494-4330-8b86-7ad24bc25862 is no longer used by this node. Pending deletion! [ 3101.033987] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/af6b2d10-0494-4330-8b86-7ad24bc25862" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3101.034206] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/18221d75-d2a3-4bb8-b122-6e2da235b59f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3101.034324] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/18221d75-d2a3-4bb8-b122-6e2da235b59f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3101.034619] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/18221d75-d2a3-4bb8-b122-6e2da235b59f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3101.034859] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28bb388d-e2dd-428e-be74-99f72b2a2b14 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3101.038884] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3101.038884] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b69184-8c74-0b66-c492-3092c7506a3f" [ 3101.038884] env[61663]: _type = "Task" [ 3101.038884] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3101.046102] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b69184-8c74-0b66-c492-3092c7506a3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3101.078998] env[61663]: DEBUG oslo_concurrency.lockutils [None req-3cb0ba0b-1d97-4494-b95c-fd7cb90b460e tempest-ServerDiskConfigTestJSON-1721079730 tempest-ServerDiskConfigTestJSON-1721079730-project-member] Lock "0266b3f5-ee31-46d7-af5e-844a27bfd829" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=61663) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3101.549618] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b69184-8c74-0b66-c492-3092c7506a3f, 'name': SearchDatastore_Task, 'duration_secs': 0.00936} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3101.549960] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/18221d75-d2a3-4bb8-b122-6e2da235b59f is no longer used. Deleting! [ 3101.550138] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/18221d75-d2a3-4bb8-b122-6e2da235b59f {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3101.550425] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-065050c5-a8a8-42d2-844c-109121786176 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3101.557492] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3101.557492] env[61663]: value = "task-1690968" [ 3101.557492] env[61663]: _type = "Task" [ 3101.557492] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3101.565140] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690968, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3102.066987] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690968, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115109} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3102.067369] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3102.067369] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/18221d75-d2a3-4bb8-b122-6e2da235b59f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3102.067604] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/0f720d04-176d-42ae-b0dc-38abd70220e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3102.067723] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/0f720d04-176d-42ae-b0dc-38abd70220e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3102.068051] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/0f720d04-176d-42ae-b0dc-38abd70220e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3102.068313] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-766ac19e-3697-4a79-ba25-f04de9c17a93 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3102.072307] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3102.072307] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522e0620-2f2e-161b-be1f-7885f4795de4" [ 3102.072307] env[61663]: _type = "Task" [ 3102.072307] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3102.079253] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522e0620-2f2e-161b-be1f-7885f4795de4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3102.582444] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522e0620-2f2e-161b-be1f-7885f4795de4, 'name': SearchDatastore_Task, 'duration_secs': 0.023277} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3102.582713] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/0f720d04-176d-42ae-b0dc-38abd70220e7/ts-2024-12-01-04-40-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3102.582967] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b4fd876-def4-49b4-b25b-191d0af508fe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3102.594793] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/0f720d04-176d-42ae-b0dc-38abd70220e7/ts-2024-12-01-04-40-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3102.594962] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 0f720d04-176d-42ae-b0dc-38abd70220e7 is no longer used by this node. Pending deletion! [ 3102.595111] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/0f720d04-176d-42ae-b0dc-38abd70220e7" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3102.595327] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8d6e49ca-8186-4e75-95f7-88b685041058" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3102.595447] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8d6e49ca-8186-4e75-95f7-88b685041058" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3102.595784] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8d6e49ca-8186-4e75-95f7-88b685041058" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3102.596047] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d92bea9a-cb24-4868-bb49-a6b555ac03a0 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3102.600171] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3102.600171] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522cff49-dd40-7ee7-7f84-86a837d6834d" [ 3102.600171] env[61663]: _type = "Task" [ 3102.600171] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3102.607595] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522cff49-dd40-7ee7-7f84-86a837d6834d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3103.110666] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]522cff49-dd40-7ee7-7f84-86a837d6834d, 'name': SearchDatastore_Task, 'duration_secs': 0.008278} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3103.111024] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8d6e49ca-8186-4e75-95f7-88b685041058/ts-2024-12-01-04-40-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3103.111193] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2917550-f5ea-42a0-a2fb-fa5dc390651a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3103.122826] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8d6e49ca-8186-4e75-95f7-88b685041058/ts-2024-12-01-04-40-28 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3103.122959] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8d6e49ca-8186-4e75-95f7-88b685041058 is no longer used by this node. Pending deletion! [ 3103.123140] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8d6e49ca-8186-4e75-95f7-88b685041058" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3103.123348] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/01d451fb-703d-43fb-8353-11933e47666f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3103.123468] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/01d451fb-703d-43fb-8353-11933e47666f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3103.123783] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/01d451fb-703d-43fb-8353-11933e47666f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3103.124033] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4467f69e-da6a-4d66-8590-692a36dc0fa1 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3103.127869] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3103.127869] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cfb51b-7144-a310-5ad4-af5cfd59b80a" [ 3103.127869] env[61663]: _type = "Task" [ 3103.127869] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3103.134739] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cfb51b-7144-a310-5ad4-af5cfd59b80a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3103.638618] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cfb51b-7144-a310-5ad4-af5cfd59b80a, 'name': SearchDatastore_Task, 'duration_secs': 0.00758} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3103.638901] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/01d451fb-703d-43fb-8353-11933e47666f/ts-2024-12-01-04-40-29 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3103.639172] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77f38154-f182-4510-962c-488175811365 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3103.651372] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/01d451fb-703d-43fb-8353-11933e47666f/ts-2024-12-01-04-40-29 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3103.651518] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 01d451fb-703d-43fb-8353-11933e47666f is no longer used by this node. Pending deletion! 
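[editor's note] The repeated SearchDatastore_Task entries above all follow the same shape: oslo_vmware.api logs "Waiting for the task" (wait_for_task, api.py:397), then polls it (_poll_task, api.py:434, "progress is 0%.") until it logs "completed successfully" with a duration (api.py:444). A minimal sketch of that polling loop, for orientation only; poll() and the interval are assumptions, not oslo.vmware's actual internals:

import time

def wait_for_task(poll, interval=0.5):
    # Poll a vCenter task until it finishes; mirrors the
    # "progress is 0%." -> "completed successfully" lines above.
    while True:
        info = poll()  # hypothetical: returns .state, .progress, .error
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(info.error)
        time.sleep(interval)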
[ 3103.651677] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/01d451fb-703d-43fb-8353-11933e47666f" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3103.651983] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/8beaeb24-e0c7-4cda-beb3-ed8f9c0b0d47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3103.652119] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/8beaeb24-e0c7-4cda-beb3-ed8f9c0b0d47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3103.652445] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/8beaeb24-e0c7-4cda-beb3-ed8f9c0b0d47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3103.652717] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7038094d-78f2-4ffa-bca7-f045467a7027 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3103.656923] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3103.656923] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b3af34-e879-4fd0-d8d1-e3b74560e3d1" [ 3103.656923] env[61663]: _type = "Task" [ 3103.656923] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3103.664414] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b3af34-e879-4fd0-d8d1-e3b74560e3d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3104.167528] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52b3af34-e879-4fd0-d8d1-e3b74560e3d1, 'name': SearchDatastore_Task, 'duration_secs': 0.009451} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3104.167821] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/8beaeb24-e0c7-4cda-beb3-ed8f9c0b0d47/ts-2024-12-01-04-40-30 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3104.168019] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e0d35d1-1419-4bd0-b56d-781f6c9525c8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3104.181343] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/8beaeb24-e0c7-4cda-beb3-ed8f9c0b0d47/ts-2024-12-01-04-40-30 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3104.181491] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 8beaeb24-e0c7-4cda-beb3-ed8f9c0b0d47 is no longer used by this node. Pending deletion! [ 3104.181649] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/8beaeb24-e0c7-4cda-beb3-ed8f9c0b0d47" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3104.181869] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/4286820d-b14d-4dda-97a7-ca789e2b0c99" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3104.181980] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/4286820d-b14d-4dda-97a7-ca789e2b0c99" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3104.182299] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/4286820d-b14d-4dda-97a7-ca789e2b0c99" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3104.182531] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12cb04ad-f305-4688-acd6-efdf168f9cbe {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3104.186811] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3104.186811] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dbe038-b5c0-952e-cd39-ecca4aacd727" [ 3104.186811] env[61663]: _type = "Task" [ 3104.186811] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3104.193770] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dbe038-b5c0-952e-cd39-ecca4aacd727, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3104.698293] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52dbe038-b5c0-952e-cd39-ecca4aacd727, 'name': SearchDatastore_Task, 'duration_secs': 0.008008} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3104.698555] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/4286820d-b14d-4dda-97a7-ca789e2b0c99/ts-2024-12-01-04-40-30 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3104.698824] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cceb6969-87bf-4c92-aade-4e3dc8b4a0a6 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3104.710970] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/4286820d-b14d-4dda-97a7-ca789e2b0c99/ts-2024-12-01-04-40-30 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3104.711121] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 4286820d-b14d-4dda-97a7-ca789e2b0c99 is no longer used by this node. Pending deletion! [ 3104.711290] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/4286820d-b14d-4dda-97a7-ca789e2b0c99" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3104.711509] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/87c70d8c-8da8-462e-9340-b79b70ecd542" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3104.711629] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/87c70d8c-8da8-462e-9340-b79b70ecd542" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3104.711957] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/87c70d8c-8da8-462e-9340-b79b70ecd542" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3104.712229] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae528814-94a5-4ad6-b108-88f8b418a5cc {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3104.716708] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3104.716708] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5219557e-f96c-f75e-a59a-44f7db3ed2d1" [ 3104.716708] env[61663]: _type = "Task" [ 3104.716708] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3104.724122] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5219557e-f96c-f75e-a59a-44f7db3ed2d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3105.226959] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5219557e-f96c-f75e-a59a-44f7db3ed2d1, 'name': SearchDatastore_Task, 'duration_secs': 0.009264} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3105.227293] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/87c70d8c-8da8-462e-9340-b79b70ecd542/ts-2024-12-01-04-40-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3105.227541] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e260b4f7-eed6-44d3-80cb-3051ffc01e77 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3105.239967] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/87c70d8c-8da8-462e-9340-b79b70ecd542/ts-2024-12-01-04-40-31 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3105.240160] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 87c70d8c-8da8-462e-9340-b79b70ecd542 is no longer used by this node. Pending deletion! 
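[editor's note] Each "Pending deletion!" cycle above marks an unused cached image rather than deleting it outright: once the datastore search confirms the image is idle, a ts-<timestamp> directory is created under the image's cache folder (FileManager.MakeDirectory via ds_util.mkdir), and a later pass removes entries whose marker has aged out. A hedged sketch of the marker step; mkdir here is a hypothetical stand-in for the ds_util call:

from datetime import datetime, timezone

TS_FORMAT = 'ts-%Y-%m-%d-%H-%M-%S'

def mark_unused_image(mkdir, cache_base, image_id):
    # Creates e.g. "[datastore2] devstack-image-cache_base/<image_id>/
    # ts-2024-12-01-04-40-31", matching the paths logged above.
    marker = '%s/%s/%s' % (
        cache_base, image_id,
        datetime.now(timezone.utc).strftime(TS_FORMAT))
    mkdir(marker)
    return marker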
[ 3105.240351] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/87c70d8c-8da8-462e-9340-b79b70ecd542" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3105.240600] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/d082ebc7-ca8d-40fd-9efb-d8d65fd3a622" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3105.240802] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/d082ebc7-ca8d-40fd-9efb-d8d65fd3a622" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3105.241164] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/d082ebc7-ca8d-40fd-9efb-d8d65fd3a622" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3105.241443] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8de36967-c1b6-4d68-ba1e-45c7b9c8808b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3105.246077] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3105.246077] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52753d83-1a7c-4143-4cce-a193caa70b07" [ 3105.246077] env[61663]: _type = "Task" [ 3105.246077] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3105.253811] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52753d83-1a7c-4143-4cce-a193caa70b07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3105.757387] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52753d83-1a7c-4143-4cce-a193caa70b07, 'name': SearchDatastore_Task, 'duration_secs': 0.009092} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3105.757717] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/d082ebc7-ca8d-40fd-9efb-d8d65fd3a622 is no longer used. Deleting! 
[ 3105.757866] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/d082ebc7-ca8d-40fd-9efb-d8d65fd3a622 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3105.758130] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-018162ff-db10-456e-a815-ff1eba5765e8 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3105.763763] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3105.763763] env[61663]: value = "task-1690969" [ 3105.763763] env[61663]: _type = "Task" [ 3105.763763] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3105.770846] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690969, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3106.273325] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108026} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3106.273633] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3106.273669] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/d082ebc7-ca8d-40fd-9efb-d8d65fd3a622" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3106.273902] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/7687c5b8-aee5-4285-8341-6b41c16ffc70" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3106.274013] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/7687c5b8-aee5-4285-8341-6b41c16ffc70" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3106.274348] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/7687c5b8-aee5-4285-8341-6b41c16ffc70" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3106.274619] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f2106d0-32e8-4dd3-9123-8ab0441add4b {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3106.279055] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3106.279055] env[61663]: value = 
"session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5254dd35-206b-9e1f-11e0-f4213f2d0412" [ 3106.279055] env[61663]: _type = "Task" [ 3106.279055] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3106.286329] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5254dd35-206b-9e1f-11e0-f4213f2d0412, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3106.789927] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5254dd35-206b-9e1f-11e0-f4213f2d0412, 'name': SearchDatastore_Task, 'duration_secs': 0.008718} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3106.790214] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/7687c5b8-aee5-4285-8341-6b41c16ffc70/ts-2024-12-01-04-40-32 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3106.790473] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00844f87-2144-45a7-843d-c1b307eb9d74 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3106.802145] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/7687c5b8-aee5-4285-8341-6b41c16ffc70/ts-2024-12-01-04-40-32 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3106.802288] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 7687c5b8-aee5-4285-8341-6b41c16ffc70 is no longer used by this node. Pending deletion! 
[ 3106.802435] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/7687c5b8-aee5-4285-8341-6b41c16ffc70" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3106.802643] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/b27b1e4e-18ee-473e-8892-12bbd8704be0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3106.802760] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/b27b1e4e-18ee-473e-8892-12bbd8704be0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3106.803073] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/b27b1e4e-18ee-473e-8892-12bbd8704be0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3106.803300] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-427a4ea1-308e-4069-a2cb-98d3336a7030 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3106.807487] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3106.807487] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f76c43-59df-9524-cfe7-8063dc939527" [ 3106.807487] env[61663]: _type = "Task" [ 3106.807487] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3106.814700] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f76c43-59df-9524-cfe7-8063dc939527, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3107.318593] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52f76c43-59df-9524-cfe7-8063dc939527, 'name': SearchDatastore_Task, 'duration_secs': 0.007855} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3107.318921] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/b27b1e4e-18ee-473e-8892-12bbd8704be0/ts-2024-12-01-04-40-33 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3107.319140] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e339e9a-df49-45ef-843e-4e52510c110a {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3107.330621] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/b27b1e4e-18ee-473e-8892-12bbd8704be0/ts-2024-12-01-04-40-33 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3107.330763] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image b27b1e4e-18ee-473e-8892-12bbd8704be0 is no longer used by this node. Pending deletion! [ 3107.330947] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/b27b1e4e-18ee-473e-8892-12bbd8704be0" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3107.331201] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/267f0459-605d-4925-8de2-2cdd5b5e0112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3107.331325] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/267f0459-605d-4925-8de2-2cdd5b5e0112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3107.331639] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/267f0459-605d-4925-8de2-2cdd5b5e0112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3107.331885] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce518471-df8e-49cb-8f16-4eaade0eca69 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3107.336120] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3107.336120] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d3a603-6e4c-d30c-6373-9b3ab84ccd22" [ 3107.336120] env[61663]: _type = "Task" [ 3107.336120] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3107.343258] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d3a603-6e4c-d30c-6373-9b3ab84ccd22, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3107.847875] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52d3a603-6e4c-d30c-6373-9b3ab84ccd22, 'name': SearchDatastore_Task, 'duration_secs': 0.008691} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3107.848231] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image [datastore2] devstack-image-cache_base/267f0459-605d-4925-8de2-2cdd5b5e0112 is no longer used. Deleting! [ 3107.848395] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleting the datastore file [datastore2] devstack-image-cache_base/267f0459-605d-4925-8de2-2cdd5b5e0112 {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3107.848636] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46851789-6011-45d9-8b3c-6f06d459bdea {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3107.855487] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3107.855487] env[61663]: value = "task-1690970" [ 3107.855487] env[61663]: _type = "Task" [ 3107.855487] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3107.862739] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690970, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3108.365301] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': task-1690970, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127852} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3108.365563] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Deleted the datastore file {{(pid=61663) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3108.365676] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/267f0459-605d-4925-8de2-2cdd5b5e0112" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3108.365890] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/78cf89a0-8c04-4a53-b150-f89e4ebc0f2b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3108.366017] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/78cf89a0-8c04-4a53-b150-f89e4ebc0f2b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3108.366345] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/78cf89a0-8c04-4a53-b150-f89e4ebc0f2b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3108.366603] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31bf0373-bb7b-4f74-bf4d-a625c90b0895 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3108.370967] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3108.370967] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cde5b2-2c67-49bd-e8cb-4ce603098615" [ 3108.370967] env[61663]: _type = "Task" [ 3108.370967] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3108.381280] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cde5b2-2c67-49bd-e8cb-4ce603098615, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3108.881521] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52cde5b2-2c67-49bd-e8cb-4ce603098615, 'name': SearchDatastore_Task, 'duration_secs': 0.011244} completed successfully. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3108.881794] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/78cf89a0-8c04-4a53-b150-f89e4ebc0f2b/ts-2024-12-01-04-40-34 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3108.882071] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04209320-a591-4c2d-8a7a-e0700814b442 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3108.897453] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/78cf89a0-8c04-4a53-b150-f89e4ebc0f2b/ts-2024-12-01-04-40-34 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3108.897649] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 78cf89a0-8c04-4a53-b150-f89e4ebc0f2b is no longer used by this node. Pending deletion! [ 3108.897780] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/78cf89a0-8c04-4a53-b150-f89e4ebc0f2b" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3108.897996] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/26e49958-1d65-40e9-b087-d8c251d657cb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3108.898132] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/26e49958-1d65-40e9-b087-d8c251d657cb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3108.898444] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/26e49958-1d65-40e9-b087-d8c251d657cb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3108.898679] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e279106-8695-4be4-b89a-7c04a107463d {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3108.902734] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3108.902734] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528ebb62-580c-7e4c-eb64-0903f13b000a" [ 3108.902734] env[61663]: _type = "Task" [ 3108.902734] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3108.910064] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528ebb62-580c-7e4c-eb64-0903f13b000a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3109.412753] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]528ebb62-580c-7e4c-eb64-0903f13b000a, 'name': SearchDatastore_Task, 'duration_secs': 0.015876} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3109.413177] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/26e49958-1d65-40e9-b087-d8c251d657cb/ts-2024-12-01-04-40-35 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3109.413337] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-751a1e02-9afc-4169-9a97-403ed3f50c79 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3109.434332] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/26e49958-1d65-40e9-b087-d8c251d657cb/ts-2024-12-01-04-40-35 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3109.434485] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 26e49958-1d65-40e9-b087-d8c251d657cb is no longer used by this node. Pending deletion! [ 3109.434649] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/26e49958-1d65-40e9-b087-d8c251d657cb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3109.434880] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/e2a96820-d768-4b62-b26d-cb965fed34e1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3109.435058] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/e2a96820-d768-4b62-b26d-cb965fed34e1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3109.435374] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/e2a96820-d768-4b62-b26d-cb965fed34e1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3109.435676] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8704a4b-e0fd-473e-b5f8-a880ff58a161 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3109.439676] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){ [ 3109.439676] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5249cb93-f45e-7b10-255b-c83cb1537b0e" [ 3109.439676] env[61663]: _type = "Task" [ 3109.439676] env[61663]: } to complete. 
{{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 3109.447200] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5249cb93-f45e-7b10-255b-c83cb1537b0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 3109.950109] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]5249cb93-f45e-7b10-255b-c83cb1537b0e, 'name': SearchDatastore_Task, 'duration_secs': 0.014434} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 3109.950390] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/e2a96820-d768-4b62-b26d-cb965fed34e1/ts-2024-12-01-04-40-35 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 3109.950636] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1c4010f-3bd4-4587-a678-c934e28768a4 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3109.968580] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/e2a96820-d768-4b62-b26d-cb965fed34e1/ts-2024-12-01-04-40-35 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 3109.968769] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image e2a96820-d768-4b62-b26d-cb965fed34e1 is no longer used by this node. Pending deletion!
[ 3109.968932] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/e2a96820-d768-4b62-b26d-cb965fed34e1" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 3109.969165] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/5cd3a363-b7dc-4dc5-b6ca-0df653ac855a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 3109.969284] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/5cd3a363-b7dc-4dc5-b6ca-0df653ac855a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 3109.969606] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/5cd3a363-b7dc-4dc5-b6ca-0df653ac855a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 3109.969923] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dbbc20e-a567-4529-aa49-f2a9e0441b19 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3109.974256] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 3109.974256] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52fe34bd-48df-15e0-82c6-5699ab6b987e"
[ 3109.974256] env[61663]: _type = "Task"
[ 3109.974256] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 3109.981815] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52fe34bd-48df-15e0-82c6-5699ab6b987e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 3110.485664] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]52fe34bd-48df-15e0-82c6-5699ab6b987e, 'name': SearchDatastore_Task, 'duration_secs': 0.013491} completed successfully. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 3110.485997] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Creating directory with path [datastore2] devstack-image-cache_base/5cd3a363-b7dc-4dc5-b6ca-0df653ac855a/ts-2024-12-01-04-40-36 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 3110.486259] env[61663]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1288ca39-146e-4685-919b-e58a30bbb277 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3110.562842] env[61663]: DEBUG nova.virt.vmwareapi.ds_util [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Created directory with path [datastore2] devstack-image-cache_base/5cd3a363-b7dc-4dc5-b6ca-0df653ac855a/ts-2024-12-01-04-40-36 {{(pid=61663) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 3110.563027] env[61663]: INFO nova.virt.vmwareapi.imagecache [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Image 5cd3a363-b7dc-4dc5-b6ca-0df653ac855a is no longer used by this node. Pending deletion!
[ 3110.563171] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Releasing lock "[datastore2] devstack-image-cache_base/5cd3a363-b7dc-4dc5-b6ca-0df653ac855a" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 3110.563397] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquiring lock "[datastore2] devstack-image-cache_base/3d70d47a-d9f6-483c-81c4-d54931bdeaeb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 3110.563517] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired lock "[datastore2] devstack-image-cache_base/3d70d47a-d9f6-483c-81c4-d54931bdeaeb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 3110.563854] env[61663]: DEBUG oslo_concurrency.lockutils [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Acquired external semaphore "[datastore2] devstack-image-cache_base/3d70d47a-d9f6-483c-81c4-d54931bdeaeb" {{(pid=61663) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 3110.564293] env[61663]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fa53d02-1978-4909-a555-1db0033f0ac3 {{(pid=61663) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 3110.569114] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Waiting for the task: (returnval){
[ 3110.569114] env[61663]: value = "session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525bf8c7-8c33-c62a-72fd-56121d2c2e22"
[ 3110.569114] env[61663]: _type = "Task"
[ 3110.569114] env[61663]: } to complete. {{(pid=61663) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 3110.576835] env[61663]: DEBUG oslo_vmware.api [None req-d39473fe-5dbd-4645-a091-ae1657a82e8c None None] Task: {'id': session[5298e41d-4bf2-b9db-4ca1-fdce196b638b]525bf8c7-8c33-c62a-72fd-56121d2c2e22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61663) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
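Note: the records above are one pass of nova.virt.vmwareapi.imagecache aging the VMware datastore image cache. For each cached image in devstack-image-cache_base that no instance on this node still uses, the manager takes a lock on the image's cache directory, runs a HostDatastoreBrowser SearchDatastore_Task, drops a ts-<timestamp> marker directory via FileManager.MakeDirectory, logs "Pending deletion!", and releases the lock; a later pass removes images whose newest marker exceeds the configured age. The following is a minimal local-filesystem sketch of that marker scheme, not Nova's actual code (mark_unused, sweep, and the /tmp path are illustrative names; the real implementation drives vCenter through oslo.vmware tasks as shown in the log):

    # Hypothetical sketch of the ts-marker aging pattern; a local directory
    # tree stands in for the vCenter datastore.
    import shutil
    from datetime import datetime, timedelta, timezone
    from pathlib import Path

    TS_FORMAT = "ts-%Y-%m-%d-%H-%M-%S"  # matches ts-2024-12-01-04-40-35 in the log

    def mark_unused(cache_root: Path, image_id: str) -> Path:
        """Drop a ts-<timestamp> marker inside an unused cached image.

        Mirrors the FileManager.MakeDirectory call in the log: the marker
        records *when* the image was last seen unused; nothing is deleted yet.
        """
        marker = cache_root / image_id / datetime.now(timezone.utc).strftime(TS_FORMAT)
        marker.mkdir(parents=True, exist_ok=True)
        return marker

    def sweep(cache_root: Path, max_age: timedelta) -> None:
        """Delete cached images whose newest marker is older than max_age."""
        now = datetime.now(timezone.utc)
        for image_dir in cache_root.iterdir():
            # Zero-padded timestamps sort lexically in chronological order.
            markers = sorted(p.name for p in image_dir.glob("ts-*"))
            if not markers:
                continue  # no marker -> image is in use, leave it alone
            newest = datetime.strptime(markers[-1], TS_FORMAT).replace(tzinfo=timezone.utc)
            if now - newest > max_age:
                shutil.rmtree(image_dir)  # "pending deletion" becomes actual deletion

    if __name__ == "__main__":
        root = Path("/tmp/devstack-image-cache_base")  # illustrative path
        mark_unused(root, "e2a96820-d768-4b62-b26d-cb965fed34e1")
        sweep(root, max_age=timedelta(days=1))

The two-phase design (mark on one pass, delete on a later one) is what makes the "Pending deletion!" INFO lines safe: an image that becomes referenced again before the age threshold simply has its marker ignored rather than being removed out from under a booting instance.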